21:46:58.864 pool-1-thread-1 DEBUG ShutdownHookManager: Adding shutdown hook
21:46:58.962 pool-1-thread-1 DEBUG Shell: Failed to detect a valid hadoop home directory
java.io.FileNotFoundException: HADOOP_HOME and hadoop.home.dir are unset.
    at org.apache.hadoop.util.Shell.checkHadoopHomeInner(Shell.java:468)
    at org.apache.hadoop.util.Shell.checkHadoopHome(Shell.java:439)
    at org.apache.hadoop.util.Shell.<clinit>(Shell.java:516)
    at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:78)
    at org.apache.hadoop.conf.Configuration.getTimeDurationHelper(Configuration.java:1814)
    at org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1791)
    at org.apache.hadoop.util.ShutdownHookManager.getShutdownTimeout(ShutdownHookManager.java:183)
    at org.apache.hadoop.util.ShutdownHookManager$HookEntry.<init>(ShutdownHookManager.java:207)
    at org.apache.hadoop.util.ShutdownHookManager.addShutdownHook(ShutdownHookManager.java:302)
    at org.apache.spark.util.SparkShutdownHookManager.install(ShutdownHookManager.scala:181)
    at org.apache.spark.util.ShutdownHookManager$.shutdownHooks$lzycompute(ShutdownHookManager.scala:50)
    at org.apache.spark.util.ShutdownHookManager$.shutdownHooks(ShutdownHookManager.scala:48)
    at org.apache.spark.util.ShutdownHookManager$.addShutdownHook(ShutdownHookManager.scala:153)
    at org.apache.spark.util.ShutdownHookManager$.<init>(ShutdownHookManager.scala:58)
    at org.apache.spark.util.ShutdownHookManager$.<clinit>(ShutdownHookManager.scala)
    at org.apache.spark.util.Utils$.createTempDir(Utils.scala:325)
    at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.$init$(SharedThriftServer.scala:41)
    at org.apache.spark.sql.hive.thriftserver.ThriftServerQueryTestSuite.<init>(ThriftServerQueryTestSuite.scala:55)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at java.lang.Class.newInstance(Class.java:442)
    at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:448)
    at sbt.ForkMain$Run$2.call(ForkMain.java:296)
    at sbt.ForkMain$Run$2.call(ForkMain.java:286)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
21:46:58.974 pool-1-thread-1 DEBUG Shell: setsid exited with exit code 0
21:46:59.403 pool-1-thread-1 INFO SparkContext: Running Spark version 3.1.0-SNAPSHOT
21:46:59.465 pool-1-thread-1 DEBUG MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginSuccess with annotation @org.apache.hadoop.metrics2.annotation.Metric(sampleName=Ops, always=false, valueName=Time, about=, interval=10, type=DEFAULT, value=[Rate of successful kerberos logins and latency (milliseconds)])
21:46:59.473 pool-1-thread-1 DEBUG MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginFailure with annotation @org.apache.hadoop.metrics2.annotation.Metric(sampleName=Ops, always=false, valueName=Time, about=, interval=10, type=DEFAULT, value=[Rate of failed kerberos logins and latency (milliseconds)])
21:46:59.474 pool-1-thread-1 DEBUG MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.getGroups with annotation @org.apache.hadoop.metrics2.annotation.Metric(sampleName=Ops, always=false, valueName=Time, about=, interval=10, type=DEFAULT, value=[GetGroups])
21:46:59.474 pool-1-thread-1 DEBUG MutableMetricsFactory: field private org.apache.hadoop.metrics2.lib.MutableGaugeLong org.apache.hadoop.security.UserGroupInformation$UgiMetrics.renewalFailuresTotal with annotation @org.apache.hadoop.metrics2.annotation.Metric(sampleName=Ops, always=false, valueName=Time, about=, interval=10, type=DEFAULT, value=[Renewal failures since startup])
21:46:59.474 pool-1-thread-1 DEBUG MutableMetricsFactory: field private org.apache.hadoop.metrics2.lib.MutableGaugeInt org.apache.hadoop.security.UserGroupInformation$UgiMetrics.renewalFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(sampleName=Ops, always=false, valueName=Time, about=, interval=10, type=DEFAULT, value=[Renewal failures since last successful login])
21:46:59.476 pool-1-thread-1 DEBUG MetricsSystemImpl: UgiMetrics, User and group related metrics
21:46:59.496 pool-1-thread-1 DEBUG SecurityUtil: Setting hadoop.security.token.service.use_ip to true
21:46:59.519 pool-1-thread-1 DEBUG Groups: Creating new Groups object
21:46:59.521 pool-1-thread-1 DEBUG NativeCodeLoader: Trying to load the custom-built native-hadoop library...
21:46:59.522 pool-1-thread-1 DEBUG NativeCodeLoader: Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: no hadoop in java.library.path
21:46:59.522 pool-1-thread-1 DEBUG NativeCodeLoader: java.library.path=/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib
21:46:59.522 pool-1-thread-1 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
21:46:59.523 pool-1-thread-1 DEBUG PerformanceAdvisory: Falling back to shell based
21:46:59.524 pool-1-thread-1 DEBUG JniBasedUnixGroupsMappingWithFallback: Group mapping impl=org.apache.hadoop.security.ShellBasedUnixGroupsMapping
21:46:59.595 pool-1-thread-1 DEBUG Groups: Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=300000; warningDeltaMs=5000
21:46:59.602 pool-1-thread-1 DEBUG UserGroupInformation: hadoop login
21:46:59.602 pool-1-thread-1 DEBUG UserGroupInformation: hadoop login commit
21:46:59.605 pool-1-thread-1 DEBUG UserGroupInformation: using local user:UnixPrincipal: jenkins
21:46:59.605 pool-1-thread-1 DEBUG UserGroupInformation: Using user: "UnixPrincipal: jenkins" with name jenkins
21:46:59.605 pool-1-thread-1 DEBUG UserGroupInformation: User entry: "jenkins"
21:46:59.605 pool-1-thread-1 DEBUG UserGroupInformation: UGI loginUser:jenkins (auth:SIMPLE)
21:46:59.629 pool-1-thread-1 INFO ResourceUtils: ==============================================================
21:46:59.629 pool-1-thread-1 INFO ResourceUtils: No custom resources configured for spark.driver.
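The FileNotFoundException above is benign on Linux: org.apache.hadoop.util.Shell only probes for HADOOP_HOME / hadoop.home.dir and falls back gracefully. A minimal sketch of silencing it in a local test JVM, under the assumptions that nothing actually shells out to $HADOOP_HOME binaries and that the helper name below is hypothetical:

```scala
// Hypothetical test helper, not part of the suites above: point hadoop.home.dir at an
// existing directory before org.apache.hadoop.util.Shell is first class-loaded, so the
// "HADOOP_HOME and hadoop.home.dir are unset" DEBUG stack trace is not emitted.
object HadoopHomeForTests {
  def ensureSet(): Unit = {
    val alreadySet =
      sys.env.contains("HADOOP_HOME") || System.getProperty("hadoop.home.dir") != null
    if (!alreadySet) {
      // Any readable directory satisfies the home-directory probe on Linux (assumption:
      // the tests never execute $HADOOP_HOME/bin tools).
      System.setProperty("hadoop.home.dir", System.getProperty("java.io.tmpdir"))
    }
  }
}
```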
21:46:59.630 pool-1-thread-1 INFO ResourceUtils: ==============================================================
21:46:59.630 pool-1-thread-1 INFO SparkContext: Submitted application: test-sql-context
21:46:59.657 pool-1-thread-1 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0)
21:46:59.677 pool-1-thread-1 INFO ResourceProfile: Limiting resource is cpu
21:46:59.678 pool-1-thread-1 INFO ResourceProfileManager: Added ResourceProfile id: 0
21:46:59.742 pool-1-thread-1 INFO SecurityManager: Changing view acls to: jenkins
21:46:59.742 pool-1-thread-1 INFO SecurityManager: Changing modify acls to: jenkins
21:46:59.743 pool-1-thread-1 INFO SecurityManager: Changing view acls groups to:
21:46:59.743 pool-1-thread-1 INFO SecurityManager: Changing modify acls groups to:
21:46:59.744 pool-1-thread-1 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); groups with view permissions: Set(); users with modify permissions: Set(jenkins); groups with modify permissions: Set()
21:46:59.868 pool-1-thread-1 DEBUG InternalLoggerFactory: Using SLF4J as the default logging framework
21:46:59.869 pool-1-thread-1 DEBUG InternalThreadLocalMap: -Dio.netty.threadLocalMap.stringBuilder.initialSize: 1024
21:46:59.870 pool-1-thread-1 DEBUG InternalThreadLocalMap: -Dio.netty.threadLocalMap.stringBuilder.maxSize: 4096
21:46:59.883 pool-1-thread-1 DEBUG MultithreadEventLoopGroup: -Dio.netty.eventLoopThreads: 64
21:46:59.908 pool-1-thread-1 DEBUG NioEventLoop: -Dio.netty.noKeySetOptimization: false
21:46:59.908 pool-1-thread-1 DEBUG NioEventLoop: -Dio.netty.selectorAutoRebuildThreshold: 512
21:46:59.927 pool-1-thread-1 DEBUG PlatformDependent0: -Dio.netty.noUnsafe: false
21:46:59.927 pool-1-thread-1 DEBUG PlatformDependent0: Java version: 8
21:46:59.928 pool-1-thread-1 DEBUG PlatformDependent0: sun.misc.Unsafe.theUnsafe: available
21:46:59.929 pool-1-thread-1 DEBUG PlatformDependent0: sun.misc.Unsafe.copyMemory: available
21:46:59.929 pool-1-thread-1 DEBUG PlatformDependent0: java.nio.Buffer.address: available
21:46:59.929 pool-1-thread-1 DEBUG PlatformDependent0: direct buffer constructor: available
21:46:59.930 pool-1-thread-1 DEBUG PlatformDependent0: java.nio.Bits.unaligned: available, true
21:46:59.930 pool-1-thread-1 DEBUG PlatformDependent0: jdk.internal.misc.Unsafe.allocateUninitializedArray(int): unavailable prior to Java9
21:46:59.930 pool-1-thread-1 DEBUG PlatformDependent0: java.nio.DirectByteBuffer.<init>(long, int): available
21:46:59.930 pool-1-thread-1 DEBUG PlatformDependent: sun.misc.Unsafe: available
21:46:59.931 pool-1-thread-1 DEBUG PlatformDependent: -Dio.netty.tmpdir: /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/hive-thriftserver/target/tmp/org.apache.spark.sql.hive.thriftserver.ThriftServerQueryTestSuite (java.io.tmpdir)
21:46:59.931 pool-1-thread-1 DEBUG PlatformDependent: -Dio.netty.bitMode: 64 (sun.arch.data.model)
21:46:59.932 pool-1-thread-1 DEBUG PlatformDependent: -Dio.netty.maxDirectMemory: 3817865216 bytes
21:46:59.932 pool-1-thread-1 DEBUG PlatformDependent: -Dio.netty.uninitializedArrayAllocationThreshold: -1
21:46:59.933 pool-1-thread-1 DEBUG CleanerJava6: java.nio.ByteBuffer.cleaner(): available
21:46:59.933 pool-1-thread-1 DEBUG PlatformDependent: -Dio.netty.noPreferDirect: false
21:46:59.942 pool-1-thread-1 DEBUG PlatformDependent: org.jctools-core.MpscChunkedArrayQueue: available
21:46:59.962 pool-1-thread-1 DEBUG ResourceLeakDetector: -Dio.netty.leakDetection.level: simple
21:46:59.962 pool-1-thread-1 DEBUG ResourceLeakDetector: -Dio.netty.leakDetection.targetRecords: 4
21:46:59.966 pool-1-thread-1 DEBUG PooledByteBufAllocator: -Dio.netty.allocator.numHeapArenas: 37
21:46:59.967 pool-1-thread-1 DEBUG PooledByteBufAllocator: -Dio.netty.allocator.numDirectArenas: 37
21:46:59.967 pool-1-thread-1 DEBUG PooledByteBufAllocator: -Dio.netty.allocator.pageSize: 8192
21:46:59.967 pool-1-thread-1 DEBUG PooledByteBufAllocator: -Dio.netty.allocator.maxOrder: 11
21:46:59.967 pool-1-thread-1 DEBUG PooledByteBufAllocator: -Dio.netty.allocator.chunkSize: 16777216
21:46:59.967 pool-1-thread-1 DEBUG PooledByteBufAllocator: -Dio.netty.allocator.tinyCacheSize: 512
21:46:59.967 pool-1-thread-1 DEBUG PooledByteBufAllocator: -Dio.netty.allocator.smallCacheSize: 256
21:46:59.967 pool-1-thread-1 DEBUG PooledByteBufAllocator: -Dio.netty.allocator.normalCacheSize: 64
21:46:59.967 pool-1-thread-1 DEBUG PooledByteBufAllocator: -Dio.netty.allocator.maxCachedBufferCapacity: 32768
21:46:59.967 pool-1-thread-1 DEBUG PooledByteBufAllocator: -Dio.netty.allocator.cacheTrimInterval: 8192
21:46:59.968 pool-1-thread-1 DEBUG PooledByteBufAllocator: -Dio.netty.allocator.cacheTrimIntervalMillis: 0
21:46:59.968 pool-1-thread-1 DEBUG PooledByteBufAllocator: -Dio.netty.allocator.useCacheForAllThreads: true
21:46:59.968 pool-1-thread-1 DEBUG PooledByteBufAllocator: -Dio.netty.allocator.maxCachedByteBuffersPerChunk: 1023
21:47:00.015 pool-1-thread-1 DEBUG DefaultChannelId: -Dio.netty.processId: 4834 (auto-detected)
21:47:00.018 pool-1-thread-1 DEBUG NetUtil: -Djava.net.preferIPv4Stack: false
21:47:00.019 pool-1-thread-1 DEBUG NetUtil: -Djava.net.preferIPv6Addresses: false
21:47:00.021 pool-1-thread-1 DEBUG NetUtil: Loopback interface: lo (lo, 0:0:0:0:0:0:0:1%lo)
21:47:00.022 pool-1-thread-1 DEBUG NetUtil: /proc/sys/net/core/somaxconn: 128
21:47:00.024 pool-1-thread-1 DEBUG DefaultChannelId: -Dio.netty.machineId: 00:1e:67:ff:fe:39:ae:40 (auto-detected)
21:47:00.056 pool-1-thread-1 DEBUG ByteBufUtil: -Dio.netty.allocator.type: pooled
21:47:00.056 pool-1-thread-1 DEBUG ByteBufUtil: -Dio.netty.threadLocalDirectBufferSize: 0
21:47:00.056 pool-1-thread-1 DEBUG ByteBufUtil: -Dio.netty.maxThreadLocalCharBufferSize: 16384
21:47:00.074 pool-1-thread-1 DEBUG TransportServer: Shuffle server started on port: 40983
21:47:00.085 pool-1-thread-1 INFO Utils: Successfully started service 'sparkDriver' on port 40983.
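The -Dio.netty.* lines above are Netty reporting ordinary JVM system properties and their effective values. A sketch of pinning two of them from a test harness before any Netty class loads; the chosen values are illustrative assumptions, not recommendations:

```scala
// Sketch: override Netty settings reported above via system properties. This must run
// before the first Netty class (e.g. Spark's RPC TransportServer) is initialized.
object NettyTestSettings {
  def apply(): Unit = {
    System.setProperty("io.netty.leakDetection.level", "paranoid") // logged default: simple
    System.setProperty("io.netty.allocator.pageSize", "8192")      // same as the logged default
  }
}
```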
21:47:00.087 pool-1-thread-1 DEBUG SparkEnv: Using serializer: class org.apache.spark.serializer.JavaSerializer
21:47:00.123 pool-1-thread-1 INFO SparkEnv: Registering MapOutputTracker
21:47:00.124 pool-1-thread-1 DEBUG MapOutputTrackerMasterEndpoint: init
21:47:00.159 pool-1-thread-1 INFO SparkEnv: Registering BlockManagerMaster
21:47:00.164 pool-1-thread-1 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
21:47:00.165 pool-1-thread-1 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
21:47:00.169 pool-1-thread-1 INFO SparkEnv: Registering BlockManagerMasterHeartbeat
21:47:00.183 pool-1-thread-1 INFO DiskBlockManager: Created local directory at /home/jenkins/workspace/NewSparkPullRequestBuilder/sql/hive-thriftserver/target/tmp/org.apache.spark.sql.hive.thriftserver.ThriftServerQueryTestSuite/blockmgr-247b6605-c30e-40b4-a3be-5dd859f5a018
21:47:00.185 pool-1-thread-1 DEBUG DiskBlockManager: Adding shutdown hook
21:47:00.212 pool-1-thread-1 INFO MemoryStore: MemoryStore started with capacity 2.1 GiB
21:47:00.234 pool-1-thread-1 INFO SparkEnv: Registering OutputCommitCoordinator
21:47:00.234 pool-1-thread-1 DEBUG OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: init
21:47:00.381 pool-1-thread-1 INFO Executor: Starting executor ID driver on host amp-jenkins-worker-04.amp
21:47:00.408 pool-1-thread-1 DEBUG TransportServer: Shuffle server started on port: 34717
21:47:00.408 pool-1-thread-1 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 34717.
21:47:00.408 pool-1-thread-1 INFO NettyBlockTransferService: Server created on amp-jenkins-worker-04.amp:34717
21:47:00.410 pool-1-thread-1 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
21:47:00.417 pool-1-thread-1 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, amp-jenkins-worker-04.amp, 34717, None)
21:47:00.420 dispatcher-BlockManagerMaster DEBUG DefaultTopologyMapper: Got a request for amp-jenkins-worker-04.amp
21:47:00.421 dispatcher-BlockManagerMaster INFO BlockManagerMasterEndpoint: Registering block manager amp-jenkins-worker-04.amp:34717 with 2.1 GiB RAM, BlockManagerId(driver, amp-jenkins-worker-04.amp, 34717, None)
21:47:00.426 pool-1-thread-1 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, amp-jenkins-worker-04.amp, 34717, None)
21:47:00.427 pool-1-thread-1 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, amp-jenkins-worker-04.amp, 34717, None)
21:47:00.653 pool-1-thread-1 DEBUG log: Logging to org.slf4j.impl.Log4jLoggerAdapter(org.eclipse.jetty.util.log) via org.eclipse.jetty.util.log.Slf4jLog
21:47:00.655 pool-1-thread-1 INFO log: Logging initialized @4489ms to org.eclipse.jetty.util.log.Slf4jLog
21:47:00.682 pool-1-thread-1 DEBUG DecoratedObjectFactory: Adding Decorator: org.eclipse.jetty.util.DeprecationWarning@87a5774
21:47:00.691 pool-1-thread-1 DEBUG ContainerLifeCycle: o.e.j.s.ServletContextHandler@114f9c65{/,null,UNAVAILABLE} added {ServletHandler@47ca5477{STOPPED},MANAGED}
21:47:00.735 pool-1-thread-1 DEBUG SparkContext: Adding shutdown hook
21:47:01.119 pool-1-thread-1 DEBUG FileSystem: Loading filesystems
21:47:01.133 pool-1-thread-1 DEBUG FileSystem: file:// = class org.apache.hadoop.fs.LocalFileSystem from /home/sparkivy/per-executor-caches/0/.ivy2/cache/org.apache.hadoop/hadoop-common/jars/hadoop-common-3.2.0.jar
21:47:01.140 pool-1-thread-1 DEBUG FileSystem: viewfs:// = class org.apache.hadoop.fs.viewfs.ViewFileSystem from /home/sparkivy/per-executor-caches/0/.ivy2/cache/org.apache.hadoop/hadoop-common/jars/hadoop-common-3.2.0.jar
21:47:01.144 pool-1-thread-1 DEBUG FileSystem: har:// = class org.apache.hadoop.fs.HarFileSystem from /home/sparkivy/per-executor-caches/0/.ivy2/cache/org.apache.hadoop/hadoop-common/jars/hadoop-common-3.2.0.jar
21:47:01.146 pool-1-thread-1 DEBUG FileSystem: http:// = class org.apache.hadoop.fs.http.HttpFileSystem from /home/sparkivy/per-executor-caches/0/.ivy2/cache/org.apache.hadoop/hadoop-common/jars/hadoop-common-3.2.0.jar
21:47:01.147 pool-1-thread-1 DEBUG FileSystem: https:// = class org.apache.hadoop.fs.http.HttpsFileSystem from /home/sparkivy/per-executor-caches/0/.ivy2/cache/org.apache.hadoop/hadoop-common/jars/hadoop-common-3.2.0.jar
21:47:01.157 pool-1-thread-1 DEBUG FileSystem: hdfs:// = class org.apache.hadoop.hdfs.DistributedFileSystem from /home/sparkivy/per-executor-caches/0/.ivy2/cache/org.apache.hadoop/hadoop-hdfs-client/jars/hadoop-hdfs-client-3.2.0.jar
21:47:01.168 pool-1-thread-1 DEBUG FileSystem: webhdfs:// = class org.apache.hadoop.hdfs.web.WebHdfsFileSystem from /home/sparkivy/per-executor-caches/0/.ivy2/cache/org.apache.hadoop/hadoop-hdfs-client/jars/hadoop-hdfs-client-3.2.0.jar
21:47:01.169 pool-1-thread-1 DEBUG FileSystem: swebhdfs:// = class org.apache.hadoop.hdfs.web.SWebHdfsFileSystem from /home/sparkivy/per-executor-caches/0/.ivy2/cache/org.apache.hadoop/hadoop-hdfs-client/jars/hadoop-hdfs-client-3.2.0.jar
21:47:01.171 pool-1-thread-1 DEBUG FileSystem: nullscan:// = class org.apache.hadoop.hive.ql.io.NullScanFileSystem from /home/sparkivy/per-executor-caches/0/.ivy2/cache/org.apache.hive/hive-exec/jars/hive-exec-2.3.7-core.jar
21:47:01.172 pool-1-thread-1 DEBUG FileSystem: file:// = class org.apache.hadoop.hive.ql.io.ProxyLocalFileSystem from /home/sparkivy/per-executor-caches/0/.ivy2/cache/org.apache.hive/hive-exec/jars/hive-exec-2.3.7-core.jar
21:47:01.172 pool-1-thread-1 DEBUG FileSystem: Looking for FS supporting file
21:47:01.172 pool-1-thread-1 DEBUG FileSystem: looking for configuration option fs.file.impl
21:47:01.183 pool-1-thread-1 DEBUG FileSystem: Filesystem file defined in configuration option
21:47:01.183 pool-1-thread-1 DEBUG FileSystem: FS for file is class org.apache.spark.DebugFilesystem
21:47:01.185 pool-1-thread-1 INFO SharedState: loading hive config file: file:/home/jenkins/workspace/NewSparkPullRequestBuilder/sql/hive/target/scala-2.12/test-classes/hive-site.xml
21:47:01.185 pool-1-thread-1 DEBUG FsUrlStreamHandlerFactory: Creating handler for protocol file
21:47:01.185 pool-1-thread-1 DEBUG FileSystem: Looking for FS supporting file
21:47:01.185 pool-1-thread-1 DEBUG FileSystem: looking for configuration option fs.file.impl
21:47:01.186 pool-1-thread-1 DEBUG FsUrlStreamHandlerFactory: Creating handler for protocol file
21:47:01.186 pool-1-thread-1 DEBUG FileSystem: Looking for FS supporting file
21:47:01.186 pool-1-thread-1 DEBUG FileSystem: looking for configuration option fs.file.impl
21:47:01.186 pool-1-thread-1 DEBUG FileSystem: Looking in service filesystems for implementation class
21:47:01.186 pool-1-thread-1 DEBUG FileSystem: FS for file is class org.apache.hadoop.hive.ql.io.ProxyLocalFileSystem
21:47:01.186 pool-1-thread-1 DEBUG FsUrlStreamHandlerFactory: Found implementation of file: class org.apache.hadoop.hive.ql.io.ProxyLocalFileSystem
21:47:01.186 pool-1-thread-1 DEBUG FsUrlStreamHandlerFactory: Using handler for protocol file
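The fs.file.impl lookups above resolve file:// to org.apache.spark.DebugFilesystem, overriding the service-loaded LocalFileSystem and ProxyLocalFileSystem registrations. A sketch of how a test SparkConf could express that mapping, assuming Spark's test-scope DebugFilesystem is on the classpath (the disable-cache entry is an optional companion setting):

```scala
import org.apache.spark.SparkConf

// spark.hadoop.* entries are copied into the Hadoop Configuration, so this sets
// fs.file.impl for the file:// scheme exactly as the DEBUG lines above show it resolving.
val conf = new SparkConf()
  .set("spark.hadoop.fs.file.impl", "org.apache.spark.DebugFilesystem")
  .set("spark.hadoop.fs.file.impl.disable.cache", "true")
```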
21:47:01.195 pool-1-thread-1 DEBUG FileSystem: Filesystem file defined in configuration option
21:47:01.195 pool-1-thread-1 DEBUG FileSystem: FS for file is class org.apache.spark.DebugFilesystem
21:47:01.196 pool-1-thread-1 DEBUG FsUrlStreamHandlerFactory: Found implementation of file: class org.apache.spark.DebugFilesystem
21:47:01.196 pool-1-thread-1 DEBUG FsUrlStreamHandlerFactory: Using handler for protocol file
21:47:01.208 pool-1-thread-1 INFO SharedState: Setting hive.metastore.warehouse.dir ('null') to the value of spark.sql.warehouse.dir ('file:/home/jenkins/workspace/NewSparkPullRequestBuilder/sql/hive-thriftserver/spark-warehouse/org.apache.spark.sql.hive.thriftserver.ThriftServerQueryTestSuite').
21:47:01.209 pool-1-thread-1 INFO SharedState: Warehouse path is 'file:/home/jenkins/workspace/NewSparkPullRequestBuilder/sql/hive-thriftserver/spark-warehouse/org.apache.spark.sql.hive.thriftserver.ThriftServerQueryTestSuite'.
21:47:02.232 pool-1-thread-1 DEBUG GenerateUnsafeProjection: code for knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._1,staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(assertnotnull(input[0, scala.Tuple2, true]))._2, true, false):
/* 001 */ public java.lang.Object generate(Object[] references) {
/* 002 */   return new SpecificUnsafeProjection(references);
/* 003 */ }
/* 004 */
/* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
/* 006 */
/* 007 */   private Object[] references;
/* 008 */   private boolean resultIsNull_0;
/* 009 */   private boolean globalIsNull_0;
/* 010 */   private java.lang.String[] mutableStateArray_0 = new java.lang.String[1];
/* 011 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1];
/* 012 */
/* 013 */   public SpecificUnsafeProjection(Object[] references) {
/* 014 */     this.references = references;
/* 015 */
/* 016 */     mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32);
/* 017 */
/* 018 */   }
/* 019 */
/* 020 */   public void initialize(int partitionIndex) {
/* 021 */
/* 022 */   }
/* 023 */
/* 024 */   // Scala.Function1 need this
/* 025 */   public java.lang.Object apply(java.lang.Object row) {
/* 026 */     return apply((InternalRow) row);
/* 027 */   }
/* 028 */
/* 029 */   public UnsafeRow apply(InternalRow i) {
/* 030 */     mutableStateArray_1[0].reset();
/* 031 */
/* 032 */
/* 033 */     mutableStateArray_1[0].zeroOutNullBytes();
/* 034 */
/* 035 */     boolean isNull_3 = i.isNullAt(0);
/* 036 */     scala.Tuple2 value_3 = isNull_3 ?
/* 037 */     null : ((scala.Tuple2)i.get(0, null));
/* 038 */     if (isNull_3) {
/* 039 */       throw new NullPointerException(((java.lang.String) references[0] /* errMsg */));
/* 040 */     }
/* 041 */     boolean isNull_0 = true;
/* 042 */     int value_0 = -1;
/* 043 */     if (!false) {
/* 044 */
/* 045 */       isNull_0 = false;
/* 046 */       if (!isNull_0) {
/* 047 */
/* 048 */         Object funcResult_0 = null;
/* 049 */         funcResult_0 = value_3._1();
/* 050 */         value_0 = (Integer) funcResult_0;
/* 051 */
/* 052 */       }
/* 053 */     }
/* 054 */     mutableStateArray_1[0].write(0, value_0);
/* 055 */
/* 056 */     UTF8String value_9 = StaticInvoke_0(i);
/* 057 */     if (globalIsNull_0) {
/* 058 */       mutableStateArray_1[0].setNullAt(1);
/* 059 */     } else {
/* 060 */       mutableStateArray_1[0].write(1, value_9);
/* 061 */     }
/* 062 */     return (mutableStateArray_1[0].getRow());
/* 063 */   }
/* 064 */
/* 065 */
/* 066 */   private UTF8String StaticInvoke_0(InternalRow i) {
/* 067 */     resultIsNull_0 = false;
/* 068 */     if (!resultIsNull_0) {
/* 069 */       boolean isNull_8 = i.isNullAt(0);
/* 070 */       scala.Tuple2 value_8 = isNull_8 ?
/* 071 */       null : ((scala.Tuple2)i.get(0, null));
/* 072 */       if (isNull_8) {
/* 073 */         throw new NullPointerException(((java.lang.String) references[1] /* errMsg */));
/* 074 */       }
/* 075 */       boolean isNull_5 = true;
/* 076 */       java.lang.String value_5 = null;
/* 077 */       if (!false) {
/* 078 */
/* 079 */         isNull_5 = false;
/* 080 */         if (!isNull_5) {
/* 081 */
/* 082 */           Object funcResult_1 = null;
/* 083 */           funcResult_1 = value_8._2();
/* 084 */
/* 085 */           if (funcResult_1 != null) {
/* 086 */             value_5 = (java.lang.String) funcResult_1;
/* 087 */           } else {
/* 088 */             isNull_5 = true;
/* 089 */           }
/* 090 */
/* 091 */
/* 092 */         }
/* 093 */       }
/* 094 */       resultIsNull_0 = isNull_5;
/* 095 */       mutableStateArray_0[0] = value_5;
/* 096 */     }
/* 097 */
/* 098 */     boolean isNull_4 = resultIsNull_0;
/* 099 */     UTF8String value_4 = null;
/* 100 */     if (!resultIsNull_0) {
/* 101 */       value_4 = or
21:47:08.134 dispatcher-event-loop-1 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
21:47:08.145 pool-1-thread-1-ScalaTest-running-HiveMetastoreLazyInitializationSuite INFO MemoryStore: MemoryStore cleared
21:47:08.146 pool-1-thread-1-ScalaTest-running-HiveMetastoreLazyInitializationSuite INFO BlockManager: BlockManager stopped
21:47:08.149 pool-1-thread-1-ScalaTest-running-HiveMetastoreLazyInitializationSuite INFO BlockManagerMaster: BlockManagerMaster stopped
21:47:08.152 dispatcher-event-loop-1 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
21:47:08.156 pool-1-thread-1-ScalaTest-running-HiveMetastoreLazyInitializationSuite INFO SparkContext: Successfully stopped SparkContext
21:47:08.157 pool-1-thread-1-ScalaTest-running-HiveMetastoreLazyInitializationSuite INFO HiveMetastoreLazyInitializationSuite: ===== FINISHED o.a.s.sql.hive.HiveMetastoreLazyInitializationSuite: 'lazily initialize Hive client' =====
21:47:08.175 pool-1-thread-1 WARN HiveMetastoreLazyInitializationSuite: ===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.hive.HiveMetastoreLazyInitializationSuite, thread names: rpc-boss-3-1, shuffle-boss-6-1 =====
21:47:08.188 pool-1-thread-1 WARN SparkSession: An existing Spark session exists as the active or default session. This probably means another suite leaked it. Attempting to stop it before continuing.
This existing Spark session was created at:
org.apache.spark.sql.hive.HiveMetastoreLazyInitializationSuite.$anonfun$new$1(HiveMetastoreLazyInitializationSuite.scala:31)
org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
org.scalatest.Transformer.apply(Transformer.scala:22)
org.scalatest.Transformer.apply(Transformer.scala:20)
org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:157)
org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:59)
org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:59)
org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
scala.collection.immutable.List.foreach(List.scala:392)
21:47:08.188 pool-1-thread-1 INFO SparkContext: SparkContext already stopped.
21:47:08.191 pool-1-thread-1 INFO SparkContext: Running Spark version 3.1.0-SNAPSHOT
21:47:08.192 pool-1-thread-1 INFO ResourceUtils: ==============================================================
21:47:08.192 pool-1-thread-1 INFO ResourceUtils: No custom resources configured for spark.driver.
21:47:08.192 pool-1-thread-1 INFO ResourceUtils: ==============================================================
21:47:08.192 pool-1-thread-1 INFO SparkContext: Submitted application: test-sql-context
21:47:08.193 pool-1-thread-1 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0)
21:47:08.193 pool-1-thread-1 INFO ResourceProfile: Limiting resource is cpu
21:47:08.193 pool-1-thread-1 INFO ResourceProfileManager: Added ResourceProfile id: 0
21:47:08.194 pool-1-thread-1 INFO SecurityManager: Changing view acls to: jenkins
21:47:08.194 pool-1-thread-1 INFO SecurityManager: Changing modify acls to: jenkins
21:47:08.194 pool-1-thread-1 INFO SecurityManager: Changing view acls groups to:
21:47:08.194 pool-1-thread-1 INFO SecurityManager: Changing modify acls groups to:
21:47:08.194 pool-1-thread-1 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); groups with view permissions: Set(); users with modify permissions: Set(jenkins); groups with modify permissions: Set()
21:47:08.213 pool-1-thread-1 DEBUG TransportServer: Shuffle server started on port: 38658
21:47:08.214 pool-1-thread-1 INFO Utils: Successfully started service 'sparkDriver' on port 38658.
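For reference against the GenerateUnsafeProjection dump above (which is truncated mid-method in this log), a hand-written Scala equivalent of what the generated SpecificUnsafeProjection does with an (Int, String) pair. This is a sketch that reuses only the UnsafeRowWriter calls visible in the generated code; the error-message plumbing via `references` is omitted:

```scala
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter
import org.apache.spark.unsafe.types.UTF8String

// Pack an (Int, String) pair into a two-column UnsafeRow, mirroring the generated
// apply(InternalRow): reset the writer, zero the null bits, write the int, then write
// the string (or mark it null), and return the row backed by the writer's buffer.
def toUnsafeRow(pair: (Int, String)): UnsafeRow = {
  val writer = new UnsafeRowWriter(2, 32) // 2 fields, 32 bytes of variable-length space
  writer.reset()
  writer.zeroOutNullBytes()
  writer.write(0, pair._1)
  if (pair._2 == null) writer.setNullAt(1)
  else writer.write(1, UTF8String.fromString(pair._2))
  writer.getRow()
}
```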
21:47:08.214 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: org.apache.spark.serializer.JavaSerializer
21:47:08.214 pool-1-thread-1 DEBUG SparkEnv: Using serializer: class org.apache.spark.serializer.JavaSerializer
21:47:08.217 pool-1-thread-1 INFO SparkEnv: Registering MapOutputTracker
21:47:08.217 pool-1-thread-1 DEBUG MapOutputTrackerMasterEndpoint: init
21:47:08.217 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: org.apache.spark.shuffle.sort.SortShuffleManager
21:47:08.218 pool-1-thread-1 INFO SparkEnv: Registering BlockManagerMaster
21:47:08.218 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: org.apache.spark.storage.DefaultTopologyMapper
21:47:08.218 pool-1-thread-1 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
21:47:08.218 pool-1-thread-1 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
21:47:08.218 pool-1-thread-1 INFO SparkEnv: Registering BlockManagerMasterHeartbeat
21:47:08.219 pool-1-thread-1 INFO DiskBlockManager: Created local directory at /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/blockmgr-d8bf4bd0-3fc8-45fe-a23d-c5678dbeb299
21:47:08.220 pool-1-thread-1 DEBUG DiskBlockManager: Adding shutdown hook
21:47:08.220 pool-1-thread-1 INFO MemoryStore: MemoryStore started with capacity 2.1 GiB
21:47:08.222 pool-1-thread-1 INFO SparkEnv: Registering OutputCommitCoordinator
21:47:08.222 pool-1-thread-1 DEBUG OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: init
21:47:08.239 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: org.apache.spark.shuffle.sort.io.LocalDiskShuffleDataIO
21:47:08.242 pool-1-thread-1 INFO Executor: Starting executor ID driver on host amp-jenkins-worker-04.amp
21:47:08.246 pool-1-thread-1 DEBUG TransportServer: Shuffle server started on port: 33470
21:47:08.246 pool-1-thread-1 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 33470.
21:47:08.246 pool-1-thread-1 INFO NettyBlockTransferService: Server created on amp-jenkins-worker-04.amp:33470
21:47:08.246 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: org.apache.spark.storage.RandomBlockReplicationPolicy
21:47:08.246 pool-1-thread-1 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
21:47:08.246 pool-1-thread-1 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, amp-jenkins-worker-04.amp, 33470, None)
21:47:08.247 dispatcher-BlockManagerMaster DEBUG DefaultTopologyMapper: Got a request for amp-jenkins-worker-04.amp
21:47:08.248 dispatcher-BlockManagerMaster INFO BlockManagerMasterEndpoint: Registering block manager amp-jenkins-worker-04.amp:33470 with 2.1 GiB RAM, BlockManagerId(driver, amp-jenkins-worker-04.amp, 33470, None)
21:47:08.248 pool-1-thread-1 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, amp-jenkins-worker-04.amp, 33470, None)
21:47:08.248 pool-1-thread-1 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, amp-jenkins-worker-04.amp, 33470, None)
21:47:08.248 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: org.apache.spark.metrics.sink.MetricsServlet
21:47:08.249 pool-1-thread-1 DEBUG DecoratedObjectFactory: Adding Decorator: org.eclipse.jetty.util.DeprecationWarning@153e5bc8
21:47:08.249 pool-1-thread-1 DEBUG ContainerLifeCycle: o.e.j.s.ServletContextHandler@6abd9e89{/,null,UNAVAILABLE} added {ServletHandler@278d2854{STOPPED},MANAGED}
21:47:08.256 pool-1-thread-1 DEBUG SparkContext: Adding shutdown hook
21:47:08.260 pool-1-thread-1 INFO ThriftServerWithSparkContextInBinarySuite: Trying to start HiveThriftServer2: mode=binary, attempt=0
21:47:08.260 pool-1-thread-1 INFO SharedState: loading hive config file: file:/home/jenkins/workspace/NewSparkPullRequestBuilder/sql/hive/target/scala-2.12/test-classes/hive-site.xml
21:47:08.271 pool-1-thread-1 INFO SharedState: Setting hive.metastore.warehouse.dir ('null') to the value of spark.sql.warehouse.dir ('file:/home/jenkins/workspace/NewSparkPullRequestBuilder/sql/hive-thriftserver/spark-warehouse/org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite').
21:47:08.272 pool-1-thread-1 INFO SharedState: Warehouse path is 'file:/home/jenkins/workspace/NewSparkPullRequestBuilder/sql/hive-thriftserver/spark-warehouse/org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite'.
21:47:08.273 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: org.apache.spark.sql.internal.SessionStateBuilder
21:47:08.279 pool-1-thread-1 INFO HiveUtils: Initializing execution hive, version 2.3.7
21:47:08.318 pool-1-thread-1 DEBUG Configuration: Handling deprecation for all properties in config...
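The SharedState lines above show hive.metastore.warehouse.dir being backfilled from spark.sql.warehouse.dir when it is unset. A sketch of setting the warehouse path explicitly when building a Hive-enabled session; the master and path below are illustrative assumptions:

```scala
import org.apache.spark.sql.SparkSession

// When hive.metastore.warehouse.dir is not set, Spark uses spark.sql.warehouse.dir as
// the warehouse path (as logged above); configuring it up front makes that explicit.
val spark = SparkSession.builder()
  .master("local[2]")
  .config("spark.sql.warehouse.dir", "/tmp/spark-warehouse") // illustrative path
  .enableHiveSupport()
  .getOrCreate()
```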
21:47:08.318 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.reducers.bytes.per.reducer
21:47:08.318 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.client.capability.check
21:47:08.318 pool-1-thread-1 DEBUG Configuration: Handling deprecation for datanucleus.storeManagerType
21:47:08.318 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.aux.jars.path
21:47:08.318 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.aggregate.stats.false.positive.probability
21:47:08.318 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.stagingdir
21:47:08.318 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.default.partition.name
21:47:08.318 pool-1-thread-1 DEBUG Configuration: Handling deprecation for mapreduce.input.fileinputformat.split.minsize.per.rack
21:47:08.318 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.event.expiry.duration
21:47:08.318 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.broker.address.default
21:47:08.318 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.mode.local.auto.input.files.max
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.key.prefix
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.orc.time.counters
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.orc.splits.ms.footer.cache.ppd.enabled
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.task.scale.memory.reserve-fraction.min
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.execution.mapjoin.native.fast.hashtable.enabled
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.skewjoin.compiletime
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.smbjoin.cache.rows
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.execution.mapjoin.overflow.repeated.threshold
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.event.message.factory
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.metrics.enabled
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.hs2.user.access
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.post.hooks
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for mapreduce.input.fileinputformat.split.minsize
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.zookeeper.quorum
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for stream.stderr.reporter.prefix
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.storage.storageDirectory
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.security.command.whitelist
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.zk.sm.connectionString
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.join.emit.interval
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.am.liveness.connection.timeout.ms
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.dynamic.semijoin.reduction.threshold
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.client.connect.retry.limit
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.xmx.headroom
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.prewarm.enabled
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.dynamic.semijoin.reduction
21:47:08.319 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.allocator.direct
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.io.rcfile.record.buffer.size
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.default.rcfile.serde
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.management.acl.blocked
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for datanucleus.schema.validateConstraints
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.users.in.admin.role
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.security.authorization.createtable.owner.grants
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.multi.insert.move.tasks.share.dependencies
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.max.partition.factor
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.auto.enforce.stats
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.log.explain.output
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.skewjoin
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.default.fileformat
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.client.consistent.splits
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapjoin.optimized.hashtable.wbsize
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.tez.session.lifetime
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.security.metastore.authorization.auth.reads
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.remove.identity.project
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.timedout.txn.reaper.start
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.cache.ttl
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.infer.bucket.sort.num.buckets.power.two
21:47:08.320 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compactor.worker.threads
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.management.acl
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.client.future.timeout
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.http.max.idle.time
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.http.cookie.auth.enabled
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.http.worker.keepalive.time
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.delegation.token.lifetime
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.archive.intermediate.archived
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.authentication.ldap.guidKey
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.input.format
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.ats.hook.queue.capacity
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.strict.checks.large.query
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.bigtable.minsize.semijoin.reduction
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.localize.resource.num.wait.attempts
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.limit.optimize.enable
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.security.authorization.createtable.role.grants
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.decode.partition.name
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.exponential.backoff.slot.length
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.execution.mapjoin.native.enabled
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compat
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.allocator.alloc.min
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.merge.smallfiles.avgsize
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.client.user
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.metadata.password
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.encode.alloc.size
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.hbase.wal.enabled
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.logging.operation.enabled
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.lockmgr.zookeeper.default.partition.name
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.wait.queue.comparator.class.name
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.support.concurrency
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.output.service.port
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.orc.cache.use.soft.references
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.file.max.footer
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.encode.enabled
21:47:08.321 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.test.mode.prefix
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cli.print.header
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.task.scale.memory.reserve.fraction.max
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.tasklog.debug.timeout
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.hashtable.loadfactor
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapred.local.mem
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.task.communicator.listener.thread-count
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.client.drop.partitions.using.expressions
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.script.auto.progress
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.dynamic.partition
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.container.max.java.heap.fraction
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.column.autogather
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.reducededuplication
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.am.liveness.heartbeat.interval.ms
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.index.filter.compact.minsize
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.decoding.metrics.percentiles.intervals
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.copyfile.maxsize
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.execution.enabled
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.security.authorization.manager
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.groupby.position.alias
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.in.test
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.txn.store.impl
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.aggregate.stats.cache.clean.until
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapjoin.hybridgrace.hashtable
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.reliable
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.use.groupby.shuffle
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.object.cache.enabled
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.map.groupby.sorted
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.hashtable.initialCapacity
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.idle.operation.timeout
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cbo.costmodel.hdfs.read
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.client.server.connect.timeout
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.parallel.ops.in.session
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.transport.mode
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.http.path
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.groupby.limit.extrastep
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.webui.use.ssl
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.test.mode.nosamplelist
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.sasl.qop
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compactor.delta.num.threshold
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.log4j.file
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.map.aggr.hash.percentmemory
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.job.debug.capture.stacktraces
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.server.max.message.size
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cluster.delegation.token.store.zookeeper.acl
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.service.metrics.file.location
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.sample.seednumber
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.client.retry.delay.seconds
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapred.reduce.tasks.speculative.execution
21:47:08.322 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.materializedview.fileformat
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.groupby.flush.percent
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.limit.optimize.fetch.max
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.num.file.cleaner.threads
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.test.fail.compaction
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.parallel
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.submitviachild
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.blobstore.use.blobstore.as.scratchdir
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.service.metrics.class
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.udtf.auto.progress
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.archive.enabled
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.convert.join.bucket.mapjoin.tez
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.execution.engine
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.basePersistDirectory
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.allocator.mmap.path
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.download.permanent.fns
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.container.size
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.webui.max.historic.queries
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.use.SSL
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.execution.reducesink.new.enabled
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.null.scan
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compactor.max.num.delta
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compactor.history.retention.attempted
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapjoin.smalltable.filesize
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.query.string
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.webui.port
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.auto.convert.join.use.nonstaged
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.task.keytab.file
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compactor.initiator.failed.compacts.threshold
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.idle.session.check.operation
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapjoin.hybridgrace.minnumpartitions
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.keystore.path
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.server.tcp.keepalive
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.service.metrics.reporter
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.client.rpc.threads
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.io.rcfile.column.number.conf
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.map.aggr.hash.min.reduction
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cbo.costmodel.cpu
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.zookeeper.clean.extra.nodes
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.metadataonly
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.output.service.max.pending.writes
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.insert.into.multilevel.dirs
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.added.archives.path
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.hmshandler.retry.attempts
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.execution.mode
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.query.lifetime.hooks
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.enable.grace.join.in.llap
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.conf.restricted.list
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.fetch.task.aggr
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.auto.convert.sortmerge.join.to.mapjoin
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.limittranspose
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.memory.mode
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.warehouse.subdir.inherit.perms
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.auto.progress.timeout
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cbo.returnpath.hiveop
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.select.threshold
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.threadpool.size
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.scratchdir
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.webui.keystore.password
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.scratchdir.lock
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.debug.localtask
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.webui.use.spnego
21:47:08.323 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.security.authorization.createtable.user.grants
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server.tcp.keepalive
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.service.metrics.file.frequency
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.ppd
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.script.maxerrsize
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.client.connect.timeout
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.session.id
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.auto.convert.join.noconditionaltask
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.input.format
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.fetch.column.stats
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compactor.cleaner.run.interval
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.skewjoin.mapjoin.map.tasks
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.hs2.coordinator.enabled
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for mapreduce.job.reduces
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.schema.verification.record.version
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.task.scheduler.timeout.seconds
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.support.quoted.identifiers
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.filter.stats.reduction
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compactor.initiator.on
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.fs.handler.class
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.security.authorization.task.factory
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.typecheck.on.insert
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.distinct.rewrite
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.authorization.storage.checks
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.orc.base.delta.ratio
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.fastpath
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.clear.dangling.scratchdir
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.builtin.udf.blacklist
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.test.fail.heartbeater
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for datanucleus.schema.validateTables
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.file.cleanup.delay.seconds
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.management.rpc.port
21:47:08.324 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.ppd.storage
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cbo.costmodel.local.fs.read
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapjoin.hybridgrace.bloomfilter
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.aggregate.stats.cache.max.full
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.security.authorization.enabled
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.correlation
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.user.install.directory
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.insert.into.external.tables
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.groupby.checkinterval
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cli.print.current.db
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.txn.retryable.sqlex.regex
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.io.exception.handlers
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.jobname.length
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.auto.enforce.tree
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.stats.ndv.tuner
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.added.jars.path
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.direct.sql.max.query.length
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.bind.host
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.tez.initialize.default.sessions
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compactor.history.retention.failed
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.txn.max.open.batch
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compactor.check.interval
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.close.session.on.disconnect
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.ppd.windowing
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.query.id
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.transactional.table.scan
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.current.database
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.max.variable.length
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.start.cleanup.scratchdir
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.rcfile.use.explicit.header
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.initial.metadata.count.enabled
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.orc.split.strategy
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.async.exec.keepalive.time
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.listbucketing
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.default.serde
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.zookeeper.connection.basesleeptime
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.webui.host
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.ds.connection.url.hook
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.query.result.fileformat
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.partition.name.whitelist.pattern
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.constant.propagation
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.transform.escape.input
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.orc.splits.ms.footer.cache.enabled
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.point.lookup.min
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.conf.validation
21:47:08.325 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.pre.hooks
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.script.operator.id.env.var
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.added.files.path
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for javax.jdo.option.Multithreaded
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.file.metadata.threads
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.rework.mapredwork
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.client.connect.retry.delay
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.reducers.max
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.service.refresh.interval.sec
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.fetch.task.conversion.threshold
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.limit.row.max.size
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.thrift.compact.protocol.enabled
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.auto.max.output.size
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.client.rpc.server.address
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.auto.convert.join.noconditionaltask.size
21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for
hive.script.operator.truncate.env 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.join.cache.size 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.driver.parallel.compilation 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.skewjoin.key 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for datanucleus.rdbms.initializeColumnInfo 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.remote.token.requires.signing 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.reloadable.aux.jars.path 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.bucket.pruning 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.cache.allow.synthetic.fileid 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.hash.table.inflation.factor 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.hmshandler.retry.interval 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.aggr.stats.hbase.ttl 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.local.scratchdir 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.max.message.size 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.job.credential.provider.path 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.gather.num.threads 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.archive.intermediate.original 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.mode.local.auto.inputbytes.max 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.auto.enforce.vectorized 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapjoin.localtask.max.memory.usage 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.writeset.reaper.interval 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.use.vector.serde.deserialize 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.order.columnalignment 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.hbase.snapshot.restoredir 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.output.service.send.buffer.size 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compute.splits.in.am 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.keystore.password 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.aggregate.stats.cache.max.partitions 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compactor.worker.timeout 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.tez.sessions.restricted.configs 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.authentication.kerberos.principal 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.new.job.grouping.set.cardinality 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for 
hive.exec.schema.evolution 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.enforce.sortmergebucketmapjoin 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.direct.sql.max.elements.values.clause 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.smb.number.waves 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.aggregate.stats.cache.max.writer.wait 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.auto.allow.uber 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.llap.concurrent.queries 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.webui.keystore.path 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for stream.stderr.reporter.enabled 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.indexer.partition.size.max 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.auto.auth 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.repl.task.factory 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.int.timestamp.conversion.in.seconds 21:47:08.326 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.auto.reducer.parallelism 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.rawstore.impl 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.security.metastore.authorization.manager 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.orc.splits.include.fileid 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.jar.path 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.communicator.num.threads 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.orderby.position.alias 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.task.communicator.connection.sleep.between.retries.ms 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.aggregate.stats.max.partitions 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.execution.mapjoin.native.multikey.only.enabled 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.limit.query.max.table.partition 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.service.metrics.hadoop2.component 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.yarn.shuffle.port 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.logging.operation.level 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for datanucleus.cache.level2.type 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.stats.ndv.densityfunction 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.direct.sql.max.elements.in.clause 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.direct.sql.batch.size 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.passiveWaitTimeMs 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling 
deprecation for hive.load.dynamic.partitions.thread 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.exec.print.summary 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.compress.intermediate 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.expression.proxy 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.script.recordreader 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.autogather 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.sort.dynamic.partition 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.thrift.framed.transport.enabled 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.execution.reduce.groupby.enabled 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.dbclass 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.indexer.segments.granularity 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.http.response.header.size 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.conf.internal.variable.list 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.concatenate.check.index 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.client.rpc.server.port 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.rcfile.use.sync.cache 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for datanucleus.connectionPoolingType 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.cache.pinobjtypes 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.limittranspose.reductionpercentage 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.fileformat.check 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.default.aggregator 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.repl.cm.enabled 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.keystore.path 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.client.retry.limit 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.resultset.serialize.in.tasks 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cluster.delegation.token.store.zookeeper.connectString 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.infer.bucket.sort 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.aggregate.stats.cache.ttl 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.index.compact.file 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.submit.local.task.via.child 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cluster.delegation.token.store.zookeeper.znode 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.query.timeout.seconds 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for 
hive.service.metrics.hadoop2.frequency 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.orc.splits.directory.batch.ms 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.cache.max.reader.wait 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapjoin.followby.map.aggr.hash.percentmemory 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.lock.manager 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.exec.inplace.progress 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.variable.substitute.depth 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapper.cannot.span.multiple.partitions 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.table.parameters.default 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.sampling.orderby.percent 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.ignore.mapjoin.hint 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.lock.mapred.only.operation 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.min.partition.factor 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.kerberos.keytab.file 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.groupby.mapaggr.checkinterval 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for javax.jdo.option.ConnectionUserName 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.task.scheduler.node.reenable.max.timeout.ms 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.max.open.txns 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.auto.convert.sortmerge.join.reduce.side 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cbo.costmodel.extended 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.script.operator.env.blacklist 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.webui.spnego.principal 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.in.tez.test 21:47:08.327 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.bucketmapjoin.sortedmerge 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.zookeeper.publish.configs 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for datanucleus.schema.autoCreateAll 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.index.groupby 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.auto.convert.sortmerge.join 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.auto.convert.join.hashtable.max.entries 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.truststore.path 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.tez.sessions.init.threads 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.security.authorization.createtable.group.grants 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for 
hive.metastore.authorization.storage.check.externaltable.drop 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.zk.registry.user 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.merge.rcfile.block.level 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.execution.mode 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cbo.cnf.maxnodes 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.adaptor.usage.mode 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.materializedview.rewriting 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.log.level 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.merge.mapfiles 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.client.socket.lifetime 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for fs.har.impl 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.aggregate.stats.cache.max.variance 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.authentication.ldap.groupMembershipKey 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.catalog.cache.size 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapjoin.hybridgrace.memcheckfrequency 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.index.filter.compact.maxsize 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.counters.pull.interval 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cbo.show.warnings 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.end.function.listeners 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.downloaded.resources.dir 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.fshandler.threads 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compute.query.using.stats 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.lazysimple.extended_boolean_literal 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.orc.splits.include.file.footer 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.error.on.empty.partition 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.max.bloom.filter.entries 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hadoop.bin.path 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.metadata.fraction 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.materializedview.serde 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.autogen.columnalias.prefix.includefuncname 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.port 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.task.scheduler.wait.queue.size 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.aggr.stats.cache.entries 21:47:08.328 pool-1-thread-1 DEBUG 
Configuration: Handling deprecation for hive.exec.max.created.files 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cli.prompt 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.deserialization.factor 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metadata.export.location 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.txn.operational.properties 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.aggr.stats.memory.ttl 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for javax.jdo.option.NonTransactionalRead 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.rpc.port 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.nonvector.wrapper.enabled 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exim.strict.repl.tables 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.aggregate.stats.cache.size 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.collect.tablekeys 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.use.vectorized.input.format 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.cte.materialize.threshold 21:47:08.328 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.display.partition.cols.separately 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.disallow.incompatible.col.type.changes 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.async.exec.shutdown.timeout 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.test.dummystats.aggregator 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.test.mode 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.querylog.enable.plan.progress 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.cache.clean.until 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.warehouse.dir 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.semijoin.conversion 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.collect.scancols 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.test.dummystats.publisher 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.port 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.dynamic.partition.pruning 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.http.cookie.is.httponly 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.uris 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.querylog.location 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.querylog.plan.progress.interval 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.job.debug.timeout 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.partition.inherit.table.properties 
21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cluster.delegation.token.store.class 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.metrics.enabled 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.repl.rootdir 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.http.cookie.max.age 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.zookeeper.client.port 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.alias 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.tez.default.queues 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapred.partitioner 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.async.log.enabled 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.limit.partition.request 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.logger 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.entity.capture.transform 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.allow.udf.load.on.demand 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.index.blockfilter.file 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cli.tez.session.async 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.table.type.mapping 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.event.db.listener.timetolive 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.filter.hook 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.union.remove 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.global.init.file.location 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.am-reporter.max.threads 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.bloom.filter.factor 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.failure.retries 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.try.direct.sql 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.intermediate.compression.type 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.hbase.generatehfiles 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.analyze.stmt.collect.partlevel.stats 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.join.factor 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.pre.event.listeners 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.map.fair.scheduler.queue 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.localize.resource.wait.interval 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.use.file.size.for.mapjoin 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.sasl.enabled 21:47:08.329 pool-1-thread-1 DEBUG Configuration: 
Handling deprecation for hive.strict.checks.bucketing 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.rpc.query.plan 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.truststore.password 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.bucket.pruning.compat 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.webui.spnego.principal 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.merge.mapredfiles 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cache.expr.evaluation 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for yarn.bin.path 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.counters.group.name 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for datanucleus.transactionIsolation 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.webui.spnego.keytab 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.groupby.skewindata 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.batch.retrieve.max 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.entity.separator 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.binary.record.max.length 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.max.dynamic.partitions 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapjoin.check.memory.rows 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.task.preemption.metrics.intervals 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.shuffle.dir.watcher.enabled 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.allocator.arena.count 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.use.SSL 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.task.communicator.connection.timeout.ms 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.execute.setugi 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.index.compact.query.max.entries 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.transpose.aggr.join 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for mapreduce.input.fileinputformat.split.maxsize 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapjoin.bucket.cache.size 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.maxTries 21:47:08.329 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.drop.ignorenonexistent 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.metadata.base 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.dynamic.partition.pruning.max.data.size 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.aggr.stats.invalidator.frequency 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.serdes.using.metastore.for.schema 21:47:08.330 pool-1-thread-1 DEBUG Configuration: 
Handling deprecation for hive.llap.io.allocator.mmap 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.use.lrfu 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.coordinator.address.default 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.resultset.max.fetch.size 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.merge.sparkfiles 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exim.uri.scheme.whitelist 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.conf.hidden.list 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.query.redactor.hooks 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.log4j.file 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.io.sarg.cache.max.weight.mb 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.plan 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.clear.dangling.scratchdir.interval 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.script.serde 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.sleep.time 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.ddl.createtablelike.properties.whitelist 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for mapreduce.input.fileinputformat.split.minsize.per.node 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.bucketmapjoin 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.task.principal 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.use.row.serde.deserialize 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.compile.lock.timeout 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.execution.mapjoin.minmax.enabled 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.event.clean.freq 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.session.hook 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.auto.convert.sortmerge.join.bigtable.selection.policy 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stageid.rearrange 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.timedout.txn.reaper.interval 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.temporary.table.storage 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.webui.spnego.keytab 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.groupby.maxentries 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapjoin.optimized.hashtable 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.aggregate.stats.max.variance 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.security.authenticator.manager 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.client.stats.publishers 21:47:08.330 pool-1-thread-1 
DEBUG Configuration: Handling deprecation for hive.io.rcfile.record.interval 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.lrfu.lambda 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.fetch.task.conversion 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.builtin.udf.whitelist 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.metadata.db.type 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.client.rpc.max.size 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.authentication.spnego.principal 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.test.authz.sstd.hs2.mode 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.async.exec.threads 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.output.stream.timeout 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for javax.jdo.option.ConnectionPassword 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.transactional.events.mem 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.resultset.default.fetch.size 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.zk.sm.keytab.file 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.aggregate.stats.cache.size 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.session.silent 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.repl.cm.retain 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.min.worker.threads 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.merge.cardinality.check 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.metadata.uri 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.authentication.ldap.groupClassKey 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.tez.sessions.per.default.queue 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.point.lookup 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.http.port 21:47:08.330 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.allow.permanent.fns 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.web.ssl 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.logging.operation.log.location 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for javax.jdo.option.ConnectionURL 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.semantic.analyzer.hook 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.hmshandler.force.reload.conf 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compactor.job.queue 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for datanucleus.schema.validateColumns 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.prewarm.numcontainers 21:47:08.331 pool-1-thread-1 DEBUG 
Configuration: Handling deprecation for datanucleus.identifierFactory 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cli.errors.ignore 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.multigroupby.singlereducer 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.txn.manager.dump.lock.state.on.acquire.timeout 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compactor.history.retention.succeeded 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.sampling.orderby.number 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.txn.timeout 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.fetch.partition.stats 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.use.fileid.path 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.server.max.threads 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.limit.optimize.limit.file 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.script.allow.partial.consumption 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.try.direct.sql.ddl 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.zookeeper.namespace 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapjoin.hybridgrace.minwbsize 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.long.polling.timeout 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.worker.keepalive.time 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.acl.blocked 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.enforce.bucketmapjoin 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.allow.user.substitution 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.index.autoupdate 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.ssl.protocol.blacklist 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.dynamic.partition.pruning 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.max.dynamic.partitions.pernode 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compactor.abortedtxn.threshold 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.map.aggr 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for javax.jdo.PersistenceManagerFactoryClass 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.lock.numretries 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.auto.convert.join 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.support.dynamic.service.discovery 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.metadata.username 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.encode.slice.row.count 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.zk.sm.principal 21:47:08.331 pool-1-thread-1 DEBUG 
Configuration: Handling deprecation for hive.mapjoin.optimized.hashtable.probe.percent 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.select.distribute 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for datanucleus.cache.level2 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.am.use.fqdn 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.kerberos.principal 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for datanucleus.rdbms.useLegacyNativeValueStrategy 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.task.scheduler.node.reenable.min.timeout.ms 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.support.special.characters.tablename 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.validate.acls 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mv.files.thread 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.skip.compile.udf.check 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.index.compact.binary.search 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.http.cookie.is.secure 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.reorder.nway.joins 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.merge.orcfile.stripe.level 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.compress.output 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.list.num.entries 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.security.authorization.sqlstd.confwhitelist.append 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.encode.vector.serde.enabled 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.explain.dependency.append.tasktype 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.bucketingsorting 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.login.timeout 21:47:08.331 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.scratch.dir.permission 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.repl.cm.interval 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.hashtable.key.count.adjustment 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.failure.hooks 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.integral.jdo.pushdown 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.keytab.file 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.sleep.interval.between.start.attempts 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.client.socket.timeout 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for javax.jdo.option.DetachAllOnCommit 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.yarn.container.mb 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for 
hive.compactor.delta.pct.threshold 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.vectorized.execution.reduce.enabled 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for javax.jdo.option.ConnectionDriverName 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.http.read.timeout 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.orm.retrieveMapNullsAsEmptyStrings 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.blobstore.optimizations.enabled 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.orc.gap.cache 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.merge.tezfiles 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.index.filter 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.security.authorization.sqlstd.confwhitelist 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.dynamic.partition.hashjoin 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.copyfile.maxnumfiles 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.map.num.entries 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.dynamic.partition.pruning.max.event.size 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cbo.enable 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.encode.formats 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.mode.local.auto 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.reducededuplication.min.reducer 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.max.start.attempts 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.dynamic.partition.mode 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.max.worker.threads 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cbo.costmodel.network 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.aggregate.stats.cache.fpp 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.driver.run.hooks 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.http.numConnection 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.unlock.numretries 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.session.history.enabled 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.task.scheduler.enable.preemption 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.num.executors 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.groupby 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.cache.max.full 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.connection.class 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.tez.sessions.custom.queue.allowed 21:47:08.332 pool-1-thread-1 DEBUG 
Configuration: Handling deprecation for hive.llap.daemon.service.principal 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.check.crossproducts 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server.read.socket.timeout 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.perf.logger 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for datanucleus.plugin.pluginRegistryBundleCheck 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.encode.slice.lrr 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.dbaccess.ssl.properties 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.parallel.thread.number 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.client.password 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.aggregate.stats.cache.max.reader.wait 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.security.metastore.authenticator.manager 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.hbase.cache.max.writer.wait 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.default.fileformat.managed 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.thrift.http.request.header.size 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.index.compact.file.ignore.hdfs 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.webui.max.threads 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.limittranspose.reductiontuples 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.test.rollbacktxn 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.task.scheduler.num.schedulable.tasks.per.node 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.acl 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.memory.size 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.strict.checks.type.safety 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.async.exec.async.compile 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.auto.max.input.size 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.limit.pushdown.memory.usage 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.enable.memory.manager 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.batch.retrieve.table.partition.max 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.blobstore.supported.schemes 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.msck.repair.batch.size 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.dynamic.partition.pruning.max.data.size 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metadata.move.exported.metadata.to.trash 21:47:08.332 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cli.pretty.output.num.cols 21:47:08.332 pool-1-thread-1 
DEBUG Configuration: Handling deprecation for hive.orc.splits.allow.synthetic.fileid 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.zookeeper.session.timeout 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.fetch.output.serde 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.log.trace.id 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.skewjoin.mapjoin.min.split 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.resultset.use.unique.column.names 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.zookeeper.connection.max.retries 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.session.check.interval 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.filter.in.factor 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for parquet.memory.pool.ratio 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.use.op.stats 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.client.stats.counters 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.authentication.spnego.keytab 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.ppd.recognizetransivity 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.io.rcfile.tolerate.corruptions 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.client.secret.bits 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.input.listing.max.threads 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.autogen.columnalias.prefix.label 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.event.listeners 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.tez.session.lifetime.jitter 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.orc.compute.splits.num.threads 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.rowoffset 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.web.port 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.default.publisher 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.script.recordwriter 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.ppd.remove.duplicatefilters 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.keystore.password 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.strict.checks.cartesian.product 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.variable.substitute 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.txn.manager 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.rpc.num.handlers 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.vcpus.per.instance 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.intermediate.compression.codec 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for 
hive.metastore.server.min.threads 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.count.open.txns.interval 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.aggregate.stats.cache.enabled 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.min.bloom.filter.entries 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.partition.columns.separate 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.init.hooks 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.dml.events 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.log.every.n.records 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.orc.cache.stripe.details.mem.size 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.task.scheduler.locality.delay 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.txn.heartbeat.threadpool.size 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.index.compact.query.max.size 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.heartbeat.interval 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.lock.sleep.between.retries 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.test.mode.samplefreq 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.repl.cmrootdir 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.task.scheduler.node.disable.backoff.factor 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.authentication 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.map.aggr.hash.force.flush.memory.threshold 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.async.exec.wait.queue.size 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.explain.user 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.schema.verification 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.exec.inplace.progress 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.am.liveness.connection.sleep.between.retries.ms 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.connect.retries 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.working.directory 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.token.signature 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.memory.per.instance.mb 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.archive.intermediate.extracted 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cbo.costmodel.hdfs.write 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.authentication.kerberos.keytab 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.cpu.vcores 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.msck.path.validation 21:47:08.333 pool-1-thread-1 
DEBUG Configuration: Handling deprecation for hive.tez.task.scale.memory.reserve.fraction 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.parquet.timestamp.skip.conversion 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.merge.size.per.task 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for datanucleus.connectionPool.maxPoolSize 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.merge.nway.joins 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.compactor.history.reaper.interval 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.txn.strict.locking.mode 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.client.rpc.sasl.mechanisms 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.optimize.sampling.orderby 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.script.trust 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.mapjoin.followby.gby.localtask.max.memory.usage 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.spark.job.monitor.timeout 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.exec.show.job.failure.debug.info 21:47:08.333 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.groupby.orderby.position.alias 21:47:08.334 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.cbo.costmodel.local.fs.write 21:47:08.334 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.metastore.transactional.event.listeners 21:47:08.334 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.encode.vector.serde.async.enabled 21:47:08.334 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.tez.input.generate.consistent.splits 21:47:08.334 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.in.place.progress 21:47:08.334 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.ndv.error 21:47:08.334 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.enable.doAs 21:47:08.334 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.zookeeper.namespace 21:47:08.334 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.stats.atomic 21:47:08.334 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.druid.indexer.memory.rownum.max 21:47:08.334 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.daemon.work.dirs 21:47:08.334 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.xsrf.filter.enabled 21:47:08.334 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.server2.idle.session.timeout 21:47:08.334 pool-1-thread-1 DEBUG Configuration: Handling deprecation for hive.llap.io.allocator.alloc.max 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.service.shutdown.timeout=30s 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.amlauncher.thread-count=50 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.enabled=false 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and 
extra properties to Hive Conf:fs.s3a.connection.maximum=15 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.numa-awareness.numactl.cmd=/usr/bin/numactl 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.scheduler.heartbeat.interval-ms=1000 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.timeline-client.number-of-async-entities-to-merge=10 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.timeout=60 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.kerberos.principal=HTTP/_HOST@LOCALHOST 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.loadedjob.tasks.max=-1 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.framework.name=local 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.uploader.server.thread-count=50 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.nonsecure-mode.user-pattern=^[_.A-Za-z0-9][-@_.A-Za-z0-9]{0,255}?[$]?$ 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:tfile.fs.output.buffer.size=262144 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.job.task.listener.thread-count=30 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.port.maxRetries=100 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.cache.background.reload.threads=3 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.cross-origin.enabled=false 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.ftp.impl=org.apache.hadoop.fs.ftp.FtpFs 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.secure=false 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.shell.safely.delete.limit.num.files=100 21:47:08.395 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:dfs.bytes-per-checksum=512 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.acl-view-job= 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.background.sleep=25ms 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra 
properties to Hive Conf:fs.s3a.retry.limit=${fs.s3a.attempts.maximum} 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.loadedjobs.cache.size=5 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.table.create=false 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.enabled=false 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.with-user-dir=false 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.input.fileinputformat.split.minsize=0 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.container.liveness-monitor.interval-ms=600000 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.client.thread-count=50 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.seqfile.compress.blocksize=1000000 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.checksum.algo.impl=org.apache.hadoop.yarn.sharedcache.ChecksumSHA256Impl 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.interceptor-class.pipeline=org.apache.hadoop.yarn.server.nodemanager.amrmproxy.DefaultRequestInterceptor 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.leveldb-cache-read-cache-size=10485760 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.fetch.retry.interval-ms=1000 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.sql.warehouse.dir=file:/home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-f5605ad4-9734-45ff-a178-642d241e5e7f/metastore 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.maps=0-2 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.include-port-in-node-name=false 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.admin-env=MALLOC_ARENA_MAX=$MALLOC_ARENA_MAX 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.node-removal-untracked.timeout-ms=60000 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.am.max-attempts=2 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.failover.sleep.base.millis=100 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.https.address=0.0.0.0:19890 21:47:08.396 
pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.node-labels.fs-store.impl.class=org.apache.hadoop.yarn.nodelabels.FileSystemNodeLabelsStore 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.collector-service.address=${yarn.nodemanager.hostname}:8048 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.trash.checkpoint.interval=0 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.rdbms.datastoreAdapterClassName=org.datanucleus.store.rdbms.adapter.DerbyAdapter 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.map.output.collector.class=org.apache.hadoop.mapred.MapTask$MapOutputBuffer 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.node-ip-cache.expiry-interval-secs=-1 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.signature.secret.file=*********(redacted) 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.jetty.logs.serve.aliases=true 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.handler=disabled 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.handler-thread-count=10 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.max-completed-applications=1000 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.system-metrics-publisher.enabled=false 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.algorithm.class=org.apache.hadoop.yarn.server.resourcemanager.scheduler.constraint.algorithm.DefaultPlacementAlgorithm 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.webapp.address=0.0.0.0:8788 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation.token.renew-interval=*********(redacted) 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.nm.uploader.replication.factor=10 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.identifierFactory=datanucleus1 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.negative-cache.secs=30 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.task.container.log.backups=0 21:47:08.396 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.skip.proc-count.auto-incr=true 21:47:08.397 pool-1-thread-1 DEBUG 
HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.posix.attr.gid.name=gidNumber 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.driver.host=amp-jenkins-worker-04.amp 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.fallback-to-simple-auth-allowed=false 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.memory.enforced=true 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.DetachAllOnCommit=true 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.failover-proxy-provider=org.apache.hadoop.yarn.client.ConfiguredRMFailoverProxyProvider 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.http-authentication.simple.anonymous.allowed=true 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.health-monitor.check-interval.ms=1000 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.acl.reservation-enable=false 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.store.class=org.apache.hadoop.yarn.server.resourcemanager.recovery.FileSystemRMStateStore 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.hard-kill-timeout-ms=10000 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.etag.checksum.enabled=false 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-metrics.enable=true 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.fd-clean-interval-secs=60 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nodemanagers.heartbeat-interval-ms=1000 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.common.configuration.version=3.0.0 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.table.capacity.read=500 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.remote-app-log-dir-suffix=logs 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.windows-container.cpu-limit.enabled=false 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.privileged-containers.allowed=false 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.blocksize=67108864 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra 
properties to Hive Conf:hadoop.registry.zk.retry.ceiling.ms=60000 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.leveldb-store.path=${hadoop.tmp.dir}/yarn/system/confstore 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.store.in-memory.initial-delay-mins=10 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.principal=jhs/_HOST@REALM.TLD 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.skip.proc-count.auto-incr=true 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.name=file 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.reduces=0-2 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.zk.num-retries=1000 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.webapp.xfs-filter.enabled=true 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:seq.io.sort.mb=100 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.max.version=100 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.https.address=${yarn.timeline-service.hostname}:8190 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.NonTransactionalRead=true 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.scheduler.address=${yarn.resourcemanager.hostname}:8030 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.node-labels.enabled=false 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.ui-actions.enabled=true 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.timeout=600000 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.client-server.thread-count=50 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.shell.command.timeout=0s 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.crypto.cipher.suite=AES/CTR/NoPadding 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.elastic-memory-control.oom-handler=org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.resources.DefaultOOMHandler 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.connect.max-wait.ms=900000 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: 
Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.defaultFS=file:/// 21:47:08.397 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.minicluster.use-rpc=false 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.transactionIsolation=read-committed 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.har.impl.disable.cache=true 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.webapp.ui2.enable=false 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.compression.codec.bzip2.library=system-native 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.distributed-scheduling.enabled=false 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.connection-keep-alive.timeout=5 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.https.address=${yarn.resourcemanager.hostname}:8090 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.address=0.0.0.0:10020 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-tokens.master-key-rolling-interval-secs=*********(redacted) 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.is.minicluster=false 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.address=${yarn.nodemanager.hostname}:0 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.abfss.impl=org.apache.hadoop.fs.azurebfs.SecureAzureBlobFileSystem 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.s3a.impl=org.apache.hadoop.fs.s3a.S3A 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.combine.progress.records=10000 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.epoch.range=0 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.am.max-attempts=2 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.schema.validateTables=false 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.cgroups.hierarchy=/hadoop-yarn 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.wasbs.impl=org.apache.hadoop.fs.azure.Wasbs 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.timeline-service.entity-group-fs-store.cache-store-class=org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.transfer.mode=BLOCK_TRANSFER_MODE 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.start.cleanup.scratchdir=true 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.server.log.slow.rpc=false 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.cache.level2=false 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.node-labels.provider.fetch-interval-ms=1800000 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.webapp.https.address=0.0.0.0:8091 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.testing=true 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.cross-origin.enabled=false 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.wasb.impl=org.apache.hadoop.fs.azure.NativeAzureFileSystem 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.auto-update.containers=false 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.job.committer.cancel-timeout=60000 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.zk-store.parent-path=/confstore 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.default-container-executor.log-dirs.permissions=710 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.attempt.diagnostics.limit.kc=64 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.bytes-per-checksum=512 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.memory-mb=-1 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.driver.port=38658 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.abfs.impl=org.apache.hadoop.fs.azurebfs.Abfs 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.writer.flush-interval-seconds=60 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.fast.upload.active.blocks=4 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.credential.clear-text-fallback=true 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.nodemanager.collector-service.thread-count=5 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.secure.mode=false 21:47:08.398 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.joblist.cache.size=20000 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.pre.event.listeners= 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.host=0.0.0.0 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.fs.state-store.num-retries=0 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nodemanager-connect-retries=10 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-aggregation.num-log-files-per-app=30 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.encrypted.key.cache.low-watermark=0.3f 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.magic.enabled=false 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.max-retries=30 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:dfs.ha.fencing.ssh.connect-timeout=30000 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation-enable=false 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.system-metrics-publisher.enabled=false 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.markreset.buffer.percent=0.0 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.viewfs.impl=org.apache.hadoop.fs.viewfs.ViewFs 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.io.sort.factor=10 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.client.thread-count=25 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.failover-controller.new-active.rpc-timeout.ms=60000 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-localizer.java.opts=-Xmx256m 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.datestring.cache.size=200000 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.acl-modify-job= 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.windows-container.memory-limit.enabled=false 21:47:08.399 pool-1-thread-1 
DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.address=${yarn.timeline-service.hostname}:8188 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.PersistenceManagerFactoryClass=org.datanucleus.api.jdo.JDOPersistenceManagerFactory 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.job.committer.commit-window=10000 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-manager.thread-count=20 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.minicluster.fixed.ports=false 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.tags.system=YARN,HDFS,NAMENODE,DATANODE,REQUIRED,SECURITY,KERBEROS,PERFORMANCE,CLIENT ,SERVER,DEBUG,DEPRECATED,COMMON,OPTIONAL 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.integral.jdo.pushdown=false 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.cluster.max-application-priority=0 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.ttl-enable=true 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.recovery.store.fs.uri=${hadoop.tmp.dir}/mapred/history/recoverystore 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.caller.context.signature.max.size=40 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.load.resource-types.from-server=false 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.zookeeper.session-timeout.ms=10000 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.hadoop.fs.file.impl=org.apache.spark.DebugFilesystem 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:tfile.io.chunk.size=1048576 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.table.capacity.write=100 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.slowtaskthreshold=1.0 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization, org.apache.hadoop.io.serializer.avro.AvroReflectSerialization 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.failover.sleep.max.millis=2000 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.directory.search.timeout=10000 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: 
Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.warehouse.dir=file:/home/jenkins/workspace/NewSparkPullRequestBuilder/sql/hive-thriftserver/spark-warehouse/org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.store.max-logs=1000 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-attributes.provider.fetch-interval-ms=600000 21:47:08.399 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.swift.impl=org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.local-cache.max-files-per-directory=8192 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.enabled=false 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.zk.acl=world:anyone:rwcda 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.sort.spill.percent=0.80 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.scan-interval-seconds=60 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.node-attribute.fs-store.impl.class=org.apache.hadoop.yarn.server.resourcemanager.nodelabels.FileSystemNodeAttributeStore 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.retry.interval=500ms 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.best-effort=false 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled=*********(redacted) 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.posix.attr.uid.name=uidNumber 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.swebhdfs.impl=org.apache.hadoop.fs.SWebHdfs 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.elastic-memory-control.timeout-sec=5 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.ifile.readahead=true 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-timeline-store.ttl-interval-ms=300000 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.reader.webapp.address=${yarn.timeline-service.webapp.address} 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.algorithm.pool-size=1 21:47:08.400 
pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.hbase.coprocessor.jar.hdfs.location=/hbase/coprocessor/hadoop-yarn-server-timelineservice.jar 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.encrypted.key.cache.num.refill.threads=2 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.scheduler.class=org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.command-opts=-Xmx1024m 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.cluster.local.dir=${hadoop.tmp.dir}/mapred/local 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.mapfile.bloom.error.rate=0.005 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.client.resolve.topology.enabled=false 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.allowed-runtimes=default 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.store.class=org.apache.hadoop.yarn.server.sharedcachemanager.store.InMemorySCMStore 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.failover-controller.graceful-fence.rpc-timeout.ms=5000 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.replication=3 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.uid.cache.secs=14400 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.maxtaskfailures.per.tracker=3 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.metadatastore.impl=org.apache.hadoop.fs.s3a.s3guard.NullMetadataStore 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.skip.checksum.errors=false 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.file.impl=org.apache.spark.DebugFilesystem 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.client-am.ipc.max-retries-on-timeouts=3 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.xfs-filter.xframe-options=SAMEORIGIN 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.connection.timeout=200000 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.max.split.locations=15 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.max-queue-length=15 
21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.session.timeout.ms=60000 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.cache-ttl.secs=300 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jvm.system-properties-to-log=os.name,os.version,java.home,java.runtime.version,java.vendor,java.version,java.vm.name,java.class.path,java.io.tmpdir,user.dir,user.name 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.opportunistic-container-allocation.nodes-used=10 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.active-dir=/tmp/entity-file-history/active 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.transfer.buffer.size=131072 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.retry-interval-ms=1000 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.http.policy=HTTP_ONLY 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.memory.debugFill=true 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.socket.send.buffer=8192 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.abfss.impl=org.apache.hadoop.fs.azurebfs.Abfss 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.uploader.server.address=0.0.0.0:8046 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation-token.max-conf-size-bytes=*********(redacted) 21:47:08.400 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.token.validity=*********(redacted) 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.max.connections=0 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.minicluster.yarn.nodemanager.resource.memory-mb=4096 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.emit-timeline-data=false 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.system-reserved-memory-mb=-1 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.kerberos.min.seconds.before.relogin=60 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.move.thread-count=3 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.admin.client.thread-count=1 21:47:08.401 
pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.dispatcher.drain-events.timeout=300000 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.buffer.dir=${hadoop.tmp.dir}/s3a 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.enabled.protocols=TLSv1,SSLv2Hello,TLSv1.1,TLSv1.2 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.admin.address=0.0.0.0:10033 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation-status.time-out.ms=600000 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.uris= 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.assumed.role.sts.endpoint.region=us-west-1 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.port=13562 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.max-log-aggregation-diagnostics-in-memory=10 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.health-checker.interval-ms=600000 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.in.test=true 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.clientrm.interceptor-class.pipeline=org.apache.hadoop.yarn.server.router.clientrm.DefaultClientRequestInterceptor 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.zk-appid-node.split-index=0 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.blocksize=67108864 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.sandbox-mode.local-dirs.permissions=read 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.rmadmin.interceptor-class.pipeline=org.apache.hadoop.yarn.server.router.rmadmin.DefaultRMAdminRequestInterceptor 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-container-debug-info.enabled=true 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.max-cached-nodemanagers-proxies=0 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.cgroups.delete-delay-ms=20 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.delete.debug-delay-sec=0 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.pmem-check-enabled=true 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra 
properties to Hive Conf:yarn.nodemanager.disk-health-checker.max-disk-utilization-per-disk-percentage=90.0 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.app-submission.cross-platform=false 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.work-preserving-recovery.scheduling-wait-ms=10000 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-retry-minimum-interval-ms=1000 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.cache.secs=300 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.enabled=false 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.local.sas.key.mode=false 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.maximum.data.length=67108864 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.max.threads=0 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.pipeline.cache-max-size=25 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.load-comparator=QUEUE_LENGTH 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.authorization=false 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.complete.cancel.delegation.tokens=*********(redacted) 21:47:08.401 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.ui.enabled=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.paging.maximum=5000 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:nfs.exports.allowed.hosts=* rw 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.ha.enable=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.http.policy=HTTP_ONLY 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.store.in-memory.check-period-mins=720 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.ssl=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.app.name=test-sql-context 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.rdbms.useLegacyNativeValueStrategy=true 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.application-client-protocol.poll-interval-ms=200 21:47:08.402 
pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.leveldb-store.compaction-interval-secs=86400 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.writer.class=org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.zookeeper.parent-znode=/hadoop-ha 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.server2.transport.mode=binary 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-aggregation.policy.class=org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AllContainerLogAggregationPolicy 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.merge.percent=0.66 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.sql.codegen.fallback=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.filter.group=(objectClass=group) 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.scheduler.pool-size=1 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.schema.verification=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resourcemanager.minimum.version=NONE 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.speculative-cap-running-tasks=0.1 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.admin.acl=* 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.recovery.supervised=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.admin.thread-count=1 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.ha.automatic-failover.enabled=true 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.skip.maxgroups=0 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.connect.timeout=180000 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.address=${yarn.resourcemanager.hostname}:8032 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.ping=true 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.local-fs.write-limit.bytes=-1 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying 
Hadoop/Hive/Spark and extra properties to Hive Conf:fs.adl.oauth2.access.token.provider.type=*********(redacted) 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.ssl.file.buffer.size=65536 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.ha.automatic-failover.embedded=true 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.gpu.docker-plugin=nvidia-docker-v1 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.enabled=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multipart.purge=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.store.class=file 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.queue-limit-stdev=1.0f 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.end-notification.max.attempts=5 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.output.fileoutputformat.compress.codec=org.apache.hadoop.io.compress.DefaultCodec 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-monitor.procfs-tree.smaps-based-rss.enabled=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.bind.wildcard.addr=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.rest-csrf.enabled=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.health-monitor.connect-retry-interval.ms=1000 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.keytab=/etc/krb5.keytab 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.keytab=/etc/security/keytab/jhs.service.keytab 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.threads.max=10 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.input.buffer.percent=0.70 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.allowed-container-networks=host,none,bridge 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-labels.resync-interval-ms=120000 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.tmp.dir=/tmp/hadoop-${user.name} 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.maps=2 21:47:08.402 
pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.rest-csrf.custom-header=X-XSRF-Header 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.end-notification.max.retry.interval=5000 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation.retain-check-interval-seconds=-1 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.resource-tracker.client.thread-count=50 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.rm.system-metrics-publisher.emit-container-events=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-timeline-store.start-time-read-cache-size=10000 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.ha.automatic-failover.zk-base-path=/yarn-leader-election 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.seqfile.local.dir=${hadoop.tmp.dir}/io/local 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.throttle.retry.interval=100ms 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.wasb.impl=org.apache.hadoop.fs.azure.Wasb 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.schema.validateConstraints=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.submit.file.replication=10 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.minicluster.fixed.ports=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multipart.threshold=2147483647 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.xfs-filter.xframe-options=SAMEORIGIN 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.done-dir=${yarn.app.mapreduce.am.staging-dir}/history/done 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.idlethreshold=4000 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.cgroups.strict-resource-usage=false 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.input.buffer.percent=0.0 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.userremapping-gid-threshold=1 21:47:08.402 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.rest-csrf.enabled=false 21:47:08.402 
pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.host.port=21 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.ping.interval=60000 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.end.function.listeners= 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.history-writer.multi-threaded-dispatcher.pool-size=10 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.admin.address=${yarn.resourcemanager.hostname}:8033 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.client-write-packet-size=65536 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.kill.max=10 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.speculative=true 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.key.default.bitlength=128 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reducer.unconditional-preempt.delay.sec=300 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-health-checker.interval-ms=120000 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log.deletion-threads-count=4 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.webapp.filter-entity-list-by-user=false 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connection.maxidletime=10000 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.io.sort.mb=100 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.localizer.client.thread-count=5 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.erasurecode.codec.rs.rawcoders=rs_native,rs_java 21:47:08.403 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.erasurecode.codec.rs-legacy.rawcoders=rs-legacy_java 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.admin.address=0.0.0.0:8047 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.algorithm.iterator=SERIAL 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.sql.testkey=true 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.connectionPoolingType=BONECP 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.nodemanager.localizer.cache.cleanup.interval-ms=600000 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.crypto.codec.classes.aes.ctr.nopadding=org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec, org.apache.hadoop.crypto.JceAesCtrCryptoCodec 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.cache.limit.max-resources-mb=0 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.connection.ssl.enabled=true 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.process-kill-wait.ms=5000 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.hdfs-servers=${fs.defaultFS} 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.workaround.non.threadsafe.getpwuid=true 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.df.interval=60000 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multiobjectdelete.enable=true 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.cleaner.resource-sleep-ms=0 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-health-checker.min-healthy-disks=0.25 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.shell.missing.defaultFs.warning=false 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.file.buffer.size=65536 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.attr.member=member 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.random.device.file.path=/dev/urandom 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.sensitive-config-keys=*********(redacted) 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.max.retries=9 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.intermediate-data-encryption.enable=false 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.connect.retry-interval.ms=30000 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container.stderr.pattern={*stderr*,*STDERR*} 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.minimum-allocation-mb=1024 21:47:08.404 pool-1-thread-1 DEBUG 
HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.staging-dir=/tmp/hadoop-yarn/staging 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.read.timeout=180000 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.max-age=1800 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.erasurecode.codec.xor.rawcoders=xor_native,xor_java 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.connection.establish.timeout=5000 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.running.map.limit=0 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.minicluster.control-resource-monitoring=false 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.require.client.cert=false 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.kerberos.kinit.command=kinit 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.state-store.class=org.apache.hadoop.yarn.server.federation.store.impl.MemoryFederationStateStore 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.ui.showConsoleProgress=false 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.log.level=INFO 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.dns.log-slow-lookups.threshold.ms=1000 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.ubertask.enable=false 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:adl.http.timeout=-1 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.retry-attempts=3 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.caller.context.enabled=false 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.vmem-pmem-ratio=2.1 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.rpc.protection=authentication 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.health-monitor.rpc-timeout.ms=45000 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.remote-app-log-dir=/tmp/logs 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.zk.timeout-ms=10000 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.cli.prune.age=86400000 
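The long run of DEBUG lines in this section records HiveClientImpl copying each Hadoop/Hive/Spark property onto the Hive client's configuration one key at a time, with values whose keys look sensitive shown as "*********(redacted)". The following is a minimal Scala sketch of that copy pattern using only the plain Hadoop Configuration API (HiveConf extends Configuration); it is an illustration of the loop the log implies, not Spark's actual HiveClientImpl code, and copyProperties is a hypothetical helper name.

  import org.apache.hadoop.conf.Configuration

  object CopyPropsSketch {
    // Sketch only: copy every key/value pair from `source` into `target`,
    // logging each assignment the way the DEBUG lines above do.
    // A real implementation would also redact sensitive values before logging.
    def copyProperties(source: Configuration, target: Configuration): Unit = {
      val it = source.iterator() // java.util.Iterator[java.util.Map.Entry[String, String]]
      while (it.hasNext) {
        val entry = it.next()
        println(s"Applying property to Hive Conf:${entry.getKey}=${entry.getValue}")
        target.set(entry.getKey, entry.getValue)
      }
    }
  }
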
21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.pcores-vcores-multiplier=1.0 21:47:08.404 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.sandbox-mode=disabled 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.containerlauncher.threadpool-initial-size=10 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.master.rest.enabled=false 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.threads=8 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.zk.retry-interval-ms=1000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.crypto.buffer.size=8192 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-labels.provider.fetch-interval-ms=600000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.recovery.store.leveldb.path=${hadoop.tmp.dir}/mapred/history/recoverystore 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.failover-retries-on-socket-timeouts=0 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.memory.enabled=false 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.authorization.caching.enable=true 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.instrumentation.requires.admin=false 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.delete.thread-count=4 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.finish-when-all-reducers-done=true 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.jaas.context=Client 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-timeline-store.path=${hadoop.tmp.dir}/yarn/timeline 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.ConnectionUserName=APP 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.map.index.interval=128 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.max-queue-wait-time-ms=100 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.abfs.impl=org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:mapreduce.job.counters.max=120 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.rest-csrf.enabled=false 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.store-class=org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.move.interval-ms=180000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.localizer.fetch.thread-count=4 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.scheduler.client.thread-count=50 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.hostname.verifier=DEFAULT 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-state-store.path=${hadoop.tmp.dir}/yarn/timeline 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.classloader=false 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.map.params=${mapreduce.task.profile.params} 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connect.timeout=20000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.auth_to_local.mechanism=hadoop 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.app-collector.linger-period.ms=60000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nm.liveness-monitor.expiry-interval-ms=600000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.reservation-system.planfollower.time-step=1000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.enable-userremapping.allowed=true 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.webapp.api-service.enable=false 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.recovery.enabled=false 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.end-notification.retry.interval=1000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.du.interval=600000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container.stderr.tail.bytes=4096 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark 
and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.read.timeout.ms=60000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.cache.warn.after.ms=5000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.bytes-per-checksum=512 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.outputcommitter.factory.scheme.s3a=org.apache.hadoop.fs.s3a.commit.S3ACommitterFactory 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.cache.background.reload=false 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-monitor.enabled=true 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.elastic-memory-control.enabled=false 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:net.topology.script.number.args=100 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.merge.progress.records=10000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.localizer.address=${yarn.nodemanager.hostname}:8040 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.keytab=/etc/krb5.keytab 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.fetch.retry.timeout-ms=30000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.rm.container-allocation.expiry-interval-ms=600000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.fileoutputcommitter.algorithm.version=2 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.work-preserving-recovery.enabled=true 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.skip.maxrecords=0 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.root-dir=/sharedcache 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.retry.throttle.limit=${fs.s3a.attempts.maximum} 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.server2.thrift.http.port=0 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.type=simple 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.cache.limit.max-resources=0 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.userlog.limit.kb=0 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and 
extra properties to Hive Conf:yarn.resourcemanager.scheduler.monitor.enable=false 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connect.max.retries=10 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.retry.times=5 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-monitor.interval-ms=3000 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.unsafe.exceptionOnMemoryLeak=true 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.gpu.allowed-gpu-devices=auto 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.executor.id=driver 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.sharedcache.mode=disabled 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.rest-csrf.custom-header=X-XSRF-Header 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.listen.queue.size=128 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.mutation.acl-policy.class=org.apache.hadoop.yarn.server.resourcemanager.scheduler.DefaultConfigurationMutationACLPolicy 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.cpu.vcores=1 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation.file-formats=TFile 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.fd-retain-secs=300 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.user.group.static.mapping.overrides=dr.who=; 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.schema.validateColumns=false 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.sas.expiry.period=90d 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.recovery.store.class=org.apache.hadoop.mapreduce.v2.hs.HistoryServerFileSystemStateStoreService 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.fail-fast=${yarn.fail-fast} 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.proxy-user-privileges.enabled=false 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.webapp.interceptor-class.pipeline=org.apache.hadoop.yarn.server.router.webapp.DefaultRequestInterceptorREST 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.nodemanager.resource.memory.cgroups.soft-limit-percentage=90.0 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reducer.preempt.delay.sec=0 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.util.hash.type=murmur 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.storeManagerType=rdbms 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-validator=basic 21:47:08.405 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.client.job.max-retries=3 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.retry-delay.max.ms=60000 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.connection.timeout.ms=60000 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.params=-agentlib:hprof=cpu=samples,heap=sites,force=n,thread=y,verbose=n,file=%s 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.shuffle.log.backups=0 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.server2.thrift.port=0 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-diagnostics-maximum-size=10000 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.retry.interval.ms=1000 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.cgroups.delete-timeout-ms=1000 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.file.impl=org.apache.hadoop.fs.local.LocalFs 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-aggregation.roll-monitoring-interval-seconds=-1 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.cleaner.interval-ms=86400000 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.quorum=localhost:2181 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.output.fileoutputformat.compress=false 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.am-rm-tokens.master-key-rolling-interval-secs=*********(redacted) 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.assumed.role.session.duration=30m 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.conversion.rule=none 21:47:08.406 
pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.server.conf=ssl-server.xml 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.retry.throttle.interval=1000ms 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:seq.io.sort.factor=100 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.cleaner.initial-delay-mins=10 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.completion.pollinterval=5000 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.keystores.factory.class=org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.resource.cpu-vcores=1 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.enabled=false 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.capabilities=CHOWN,DAC_OVERRIDE,FSETID,FOWNER,MKNOD,NET_RAW,SETGID,SETUID,SETFCAP,SETPCAP,NET_BIND_SERVICE,SYS_CHROOT,KILL,AUDIT_WRITE 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.acl.enable=false 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.done-dir=/tmp/entity-file-history/done/ 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile=false 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.fs.state-store.uri=${hadoop.tmp.dir}/yarn/system/rmstore 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.always-scan-user-dir=false 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.opportunistic-containers-use-pause-for-preemption=false 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.nonsecure-mode.local-user=nobody 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.sql.optimizer.excludedRules=org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.reader.class=org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.configuration.provider-class=org.apache.hadoop.yarn.LocalConfigurationProvider 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.userremapping-uid-threshold=1 
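Note that several of the values dumped above are stored with unexpanded ${...} references, for example yarn.resourcemanager.address=${yarn.resourcemanager.hostname}:8032 and io.seqfile.local.dir=${hadoop.tmp.dir}/io/local. Hadoop's Configuration resolves such references when a value is read with get(), while getRaw() returns the stored form, which is why the dump shows them unresolved. A small self-contained Scala sketch of that read-time substitution (plain Hadoop API, independent of the test run itself):

  import org.apache.hadoop.conf.Configuration

  object VarExpansionSketch extends App {
    // Sketch only: demonstrate ${...} substitution at read time.
    val conf = new Configuration(false) // false = do not load *-default.xml resources
    conf.set("yarn.resourcemanager.hostname", "0.0.0.0")
    conf.set("yarn.resourcemanager.address", "${yarn.resourcemanager.hostname}:8032")

    // Stored (raw) form, as it appears in the DEBUG dump above:
    println(conf.getRaw("yarn.resourcemanager.address")) // ${yarn.resourcemanager.hostname}:8032
    // Resolved form, as returned to callers:
    println(conf.get("yarn.resourcemanager.address"))    // 0.0.0.0:8032
  }
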
21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.configuration.file-system-based-store=/yarn/conf 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.cache.limit.max-single-resource-mb=0 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.stop.grace-period=10 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.resource-profiles.source-file=resource-profiles.json 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.percentage-physical-cpu-limit=100 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.client.thread-count=10 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:tfile.fs.input.buffer.size=262144 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.progressmonitor.pollinterval=1000 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-dirs=${yarn.log.dir}/userlogs 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.automatic.close=true 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.hostname=0.0.0.0 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.memory.cgroups.swappiness=0 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.stream-buffer-size=4096 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.fail-fast=false 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.app-aggregation-interval-secs=15 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.filter.user=(&(objectClass=user)(sAMAccountName={0})) 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-localizer.log.level=INFO 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.address=${yarn.timeline-service.hostname}:10200 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.ubertask.maxmaps=9 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.threads.keepalivetime=60 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:mapreduce.task.files.preserve.failedtasks=false 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.client.job.retry-interval=2000 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.failover-controller.graceful-fence.connection.retries=1 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation.token.max-lifetime=*********(redacted) 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.drain-entities.timeout.ms=2000 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.fpga.vendor-plugin.class=org.apache.hadoop.yarn.server.nodemanager.containermanager.resourceplugin.fpga.IntelFpgaOpenclPlugin 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.summary-store=org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.cpu.vcores=1 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.encrypted-intermediate-data.buffer.kb=128 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.client.resolve.remote.symlinks=true 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.https.address=0.0.0.0:8044 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.allowed-origins=* 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.encrypted-intermediate-data=false 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.retain-seconds=604800 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.metrics.runtime.buckets=60,300,1440 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.generic-application-history.max-applications=10000 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.local-dirs=${hadoop.tmp.dir}/nm-local-dir 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.connection-keep-alive.enable=false 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.node-labels.configuration-type=centralized 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.path.style.access=false 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.nodemanager.aux-services.mapreduce_shuffle.class=org.apache.hadoop.mapred.ShuffleHandler 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.store.in-memory.staleness-period-mins=10080 21:47:08.406 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.adl.impl=org.apache.hadoop.fs.adl.AdlFileSystem 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nodemanager.minimum.version=NONE 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.xfs-filter.xframe-options=SAMEORIGIN 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.staging-dir.erasurecoding.enabled=false 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:net.topology.impl=org.apache.hadoop.net.NetworkTopology 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.map.index.skip=0 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.reader.webapp.https.address=${yarn.timeline-service.webapp.https.address} 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.data.connection.mode=ACTIVE_LOCAL_DATA_CONNECTION_MODE 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.local-fs.single-disk-limit.check.kill-limit-exceed=true 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.maximum-allocation-vcores=4 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.allowed-headers=X-Requested-With,Content-Type,Accept,Origin 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-aggregation.compression-type=none 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.version=1.0f 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.ipc.rpc.class=org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.maxattempts=4 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.dns.log-slow-lookups.enabled=false 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.committer.setup.cleanup.needed=true 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.master=local[2] 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.running.reduce.limit=0 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:ipc.maximum.response.length=134217728 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.token.tracking.ids.enabled=*********(redacted) 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.caller.context.max.size=128 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.host-pid-namespace.allowed=false 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.delayed-removal.allowed=false 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.system.acls=sasl:yarn@, sasl:mapred@, sasl:hdfs@ 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.recovery.dir=${hadoop.tmp.dir}/yarn-nm-recovery 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.fast.upload.buffer=disk 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.intermediate-done-dir=${yarn.app.mapreduce.am.staging-dir}/history/done_intermediate 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.shuffle.log.separate=true 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.max.total.tasks=5 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.readahead.range=64K 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.rawstore.impl=org.apache.hadoop.hive.metastore.ObjectStore 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.simple.anonymous.allowed=true 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.attempts.maximum=20 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.connection.timeout.ms=15000 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation-token-renewer.thread-count=*********(redacted) 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.health-checker.script.timeout-ms=1200000 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-timeline-store.start-time-write-cache-size=10000 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.resource-profiles.enabled=false 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.timeline-service.hbase-schema.prefix=prod. 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.authorization=false 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.log.level=INFO 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.decommissioning-nodes-watcher.poll-interval-secs=20 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.output.fileoutputformat.compress.type=RECORD 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.leveldb-state-store.path=${hadoop.tmp.dir}/yarn/system/rmstore 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.rest-csrf.custom-header=X-XSRF-Header 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.ifile.readahead.bytes=4194304 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.app-checker.class=org.apache.hadoop.yarn.server.sharedcachemanager.RemoteAppChecker 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.nonsecure-mode.limit-users=true 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.detect-hardware-capabilities=false 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.cluster.acls.enabled=false 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.retry-after-no-speculate=1000 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.group.hierarchy.levels=0 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.fs.state-store.retry-interval-ms=1000 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.stream-buffer-size=4096 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.application-timeouts.monitor.interval-ms=3000 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.output.compress.codec=org.apache.hadoop.io.compress.DefaultCodec 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.speculative=true 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.retry-after-speculate=15000 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.connectionPool.maxPoolSize=10 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.nodemanager.linux-container-executor.cgroups.mount=false 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.container.log.backups=0 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.log.level=INFO 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reduce.slowstart.completedmaps=0.05 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.http-authentication.type=simple 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.attr.group.name=cn 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.fpga.allowed-fpga-devices=0,1 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.internal-timers-ttl-secs=420 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.logs.enabled=true 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.block.size=32M 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.client-server.address=0.0.0.0:8045 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.logaggregation.threadpool-size-max=100 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.hostname=0.0.0.0 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation.key.update-interval=86400000 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.fetch.retry.enabled=${yarn.nodemanager.recovery.enabled} 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.memory.mb=-1 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.skip.start.attempts=2 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.hdfs.impl=org.apache.hadoop.fs.Hdfs 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-health-checker.enable=true 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.cache.level2.type=none 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.tcpnodelay=true 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.rpc-timeout.ms=0 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.nodemanager.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.low-latency=false 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.input.lineinputformat.linespermap=1 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.interceptor.user.threadpool-size=5 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.ConnectionURL=*********(redacted) 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.schema.autoCreateAll=true 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connect.max.retries.on.timeouts=45 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-timeline-store.read-cache-size=104857600 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.har.impl=org.apache.hadoop.fs.HarFs 21:47:08.407 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.split.metainfo.maxsize=10000000 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.am.liveness-monitor.expiry-interval-ms=600000 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.container-tokens.master-key-rolling-interval-secs=*********(redacted) 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.app-cache-size=10 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.socket.recv.buffer=8192 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.resource-tracker.address=${yarn.resourcemanager.hostname}:8031 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-labels.provider.fetch-timeout-ms=1200000 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.heap.memory-mb.ratio=0.8 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.leveldb-state-store.compaction-interval-secs=3600 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.rest-csrf.custom-header=X-XSRF-Header 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.fs.path=file://${hadoop.tmp.dir}/yarn/system/schedconf 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.output.filter=FAILED 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:hadoop.http.filter.initializers=org.apache.hadoop.http.lib.StaticUserWebFilter 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.memory.mb=-1 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.hostname=0.0.0.0 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.ConnectionDriverName=org.apache.derby.jdbc.EmbeddedDriver 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.replication=1 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-metrics.unregister-delay-ms=10000 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-metrics.period-ms=-1 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.fileoutputcommitter.task.cleanup.enabled=false 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log.retain-seconds=10800 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.plugin.pluginRegistryBundleCheck=LOG 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.cleaner-interval-seconds=3600 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.keytab=/etc/krb5.keytab 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.event.listeners= 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.providers.combined=true 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.merge.inmem.threshold=1000 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.recovery.enabled=false 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.saskey.usecontainersaskeyforallaccess=true 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.nm.uploader.thread-count=20 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nodemanager-graceful-decommission-timeout-secs=3600 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.ssl.enabled=false 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.hbase.coprocessor.app-final-value-retention-milliseconds=259200000 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.staging.abort.pending.uploads=true 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: 
Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.opportunistic-containers-max-queue-length=0 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.state-store.max-completed-applications=${yarn.resourcemanager.max-completed-applications} 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.ConnectionPassword=*********(redacted) 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.minimum-allowed-tasks=10 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.test.home=/home/jenkins/workspace/NewSparkPullRequestBuilder 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation.retain-seconds=-1 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-health-checker.min-free-space-per-disk-mb=0 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.max-age-ms=604800000 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.allowed-methods=GET,POST,HEAD 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.opportunistic-container-allocation.enabled=false 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.address=0.0.0.0:19888 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.system.tags=YARN,HDFS,NAMENODE,DATANODE,REQUIRED,SECURITY,KERBEROS,PERFORMANCE,CLIENT ,SERVER,DEBUG,DEPRECATED,COMMON,OPTIONAL 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation.file-controller.TFile.class=org.apache.hadoop.yarn.logaggregation.filecontroller.tfile.LogAggregationTFileController 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.nodemanager-connect.max-wait-ms=180000 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.address=${yarn.resourcemanager.hostname}:8088 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.recovery.enable=false 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.parallelcopies=5 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.webhdfs.impl=org.apache.hadoop.fs.WebHdfs 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.trash.interval=0 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.client.max-retries=3 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra 
properties to Hive Conf:hadoop.security.authentication=simple 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.reduce.params=${mapreduce.task.profile.params} 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.resource.mb=1536 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.input.fileinputformat.list-status.num-threads=1 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-executor.class=org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.mapfile.bloom.size=1048576 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.ttl-ms=604800000 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.min-queue-length=5 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.cpu-vcores=-1 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reduces=1 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multipart.size=100M 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.minimum-allocation-vcores=1 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.speculative-cap-total-tasks=0.01 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.client.conf=ssl-client.xml 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.queuename=default 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.encrypted-intermediate-data-key-size-bits=128 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.metadatastore.authoritative=false 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.xfs-filter.xframe-options=SAMEORIGIN 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.health-monitor.sleep-after-disconnect.ms=1000 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.shuffle.log.limit.kb=0 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.application-client-protocol.poll-timeout-ms=-1 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying 
Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.jhist.format=binary 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.rdbms.initializeColumnInfo=NONE 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.ha.enabled=false 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.staticuser.user=dr.who 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.exit.timeout.check-interval-ms=20000 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.intermediate-user-done-dir.permissions=770 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.exit.timeout=60000 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.resources-handler.class=org.apache.hadoop.yarn.server.nodemanager.util.DefaultLCEResourcesHandler 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.memory.limit.percent=0.25 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.reservation-system.enable=false 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.output.compress=false 21:47:08.408 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.zookeeper.acl=world:anyone:rwcda 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.server.max.connections=0 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.default-container-network=host 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.webapp.address=0.0.0.0:8089 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.maximum-allocation-mb=8192 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.scheduler.monitor.policies=org.apache.hadoop.yarn.server.resourcemanager.monitor.capacity.ProportionalCapacityPreemptionPolicy 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.cleaner.period-mins=1440 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.gpu.docker-plugin.nvidia-docker-v1.endpoint=http://localhost:3476/v1.0/docker/cli 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.container.log.limit.kb=0 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connect.retry.interval=1000 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying 
Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.http-cross-origin.enabled=false 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.wasbs.impl=org.apache.hadoop.fs.azure.NativeAzureFileSystem$Secure 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.subcluster-resolver.class=org.apache.hadoop.yarn.server.federation.resolver.DefaultSubClusterResolverImpl 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.zk-state-store.parent-path=/rmstore 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.cleaner.enable=true 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.fd-flush-interval-secs=10 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.encrypted.key.cache.expiry=43200000 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.nodemanager-client-async.thread-pool-max-size=500 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.maxattempts=4 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.exec.scratchdir=/home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-5af5b8f4-f1f4-4ef4-b2af-944add6f6c28 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.sorting-nodes-interval-ms=1000 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.staging.tmp.path=tmp/staging 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.sleep-delay-before-sigkill.ms=250 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.min-queue-wait-time-ms=10 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.end-notification.retry.attempts=0 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.count-logical-processors-as-cores=false 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.root=/registry 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:adl.feature.ownerandgroup.enableupn=false 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.zk-max-znode-size.bytes=1048576 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reduce.shuffle.consumer.plugin.class=org.apache.hadoop.mapreduce.task.reduce.Shuffle 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.resourcemanager.delayed.delegation-token.removal-interval-ms=*********(redacted) 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.localizer.cache.target-size-mb=10240 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.staging.conflict-mode=fail 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.Multithreaded=true 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.libjars.wildcard=true 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.staging.unique-filenames=true 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-attributes.provider.fetch-timeout-ms=1200000 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.list.version=2 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.client-write-packet-size=65536 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.adl.impl=org.apache.hadoop.fs.adl.Adl 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.key.default.cipher=AES/CTR/NoPadding 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.failover-retries=0 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multipart.purge.age=86400 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.local-fs.single-disk-limit.check.interval-ms=5000 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:net.topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.address=0.0.0.0:8049 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.server.listen.queue.size=128 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:map.sort.class=org.apache.hadoop.util.QuickSort 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.viewfs.rename.strategy=SAME_MOUNTPOINT 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.authentication.retry-count=1 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.permissions.umask-mode=022 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.assumed.role.credentials.provider=org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra 
properties to Hive Conf:yarn.nodemanager.vmem-check-enabled=true 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.numa-awareness.enabled=false 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.recovery.compaction-interval-secs=3600 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.client-am.ipc.max-retries=3 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.registry.base-dir=yarnfederation/ 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.app.id=local-1593406028241 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.max.map=-1 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.local-fs.single-disk-limit.bytes=-1 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.ubertask.maxreduces=1 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.encrypted.key.cache.size=500 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.java.secure.random.algorithm=SHA1PRNG 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.failover-controller.cli-check.rpc-timeout.ms=20000 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.jobname.limit=50 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.nodemanager-connect.retry-interval-ms=10000 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.state-store-class=org.apache.hadoop.yarn.server.timeline.recovery.LeveldbTimelineStateStore 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.env-whitelist=JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_HOME,PATH,LANG,TZ 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.nested-level=3 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.user.agent.prefix=unknown 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.zk-delegation-token-node.split-index=*********(redacted) 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.numa-awareness.read-topology=false 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties 
to Hive Conf:yarn.nodemanager.webapp.address=${yarn.nodemanager.hostname}:8042 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:rpc.metrics.quantile.enable=false 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.registry.class=org.apache.hadoop.registry.client.impl.FSRegistryOperationsService 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.admin.acl=* 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.system-metrics-publisher.dispatcher.pool-size=10 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.queue-placement-rules=user-group 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.recovery.enabled=false 21:47:08.409 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.rest-csrf.enabled=false 21:47:08.418 pool-1-thread-1 DEBUG SessionState: SessionState user: null 21:47:08.440 pool-1-thread-1 DEBUG SessionState: HDFS root scratch dir: /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-5af5b8f4-f1f4-4ef4-b2af-944add6f6c28 with schema null, permission: rwxrwxrwx 21:47:08.443 pool-1-thread-1 INFO SessionState: Created HDFS directory: /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-5af5b8f4-f1f4-4ef4-b2af-944add6f6c28/jenkins 21:47:08.447 pool-1-thread-1 INFO SessionState: Created HDFS directory: /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-5af5b8f4-f1f4-4ef4-b2af-944add6f6c28/jenkins/9adbc9c9-f4fb-460a-91a0-0872aec34b1e 21:47:08.450 pool-1-thread-1 INFO SessionState: Created local directory: /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/jenkins/9adbc9c9-f4fb-460a-91a0-0872aec34b1e 21:47:08.454 pool-1-thread-1 INFO SessionState: Created HDFS directory: /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-5af5b8f4-f1f4-4ef4-b2af-944add6f6c28/jenkins/9adbc9c9-f4fb-460a-91a0-0872aec34b1e/_tmp_space.db 21:47:08.465 pool-1-thread-1 INFO HiveClientImpl: Warehouse location for Hive client (version 2.3.7) is file:/home/jenkins/workspace/NewSparkPullRequestBuilder/sql/hive-thriftserver/spark-warehouse/org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite 21:47:08.466 pool-1-thread-1 INFO ServerUtils: Cleaning scratchDir : /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-5af5b8f4-f1f4-4ef4-b2af-944add6f6c28 21:47:08.507 pool-1-thread-1 INFO SessionManager: Operation log root directory is created: /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/jenkins/operation_logs 21:47:08.513 pool-1-thread-1 INFO SessionManager: HiveServer2: Background operation thread pool size: 100 21:47:08.513 pool-1-thread-1 INFO SessionManager: HiveServer2: Background operation thread wait queue size: 100 21:47:08.513 pool-1-thread-1 INFO SessionManager: HiveServer2: Background operation thread keepalive time: 10 seconds 21:47:08.520 pool-1-thread-1 INFO AbstractService: 
Service:OperationManager is inited. 21:47:08.520 pool-1-thread-1 INFO AbstractService: Service:SessionManager is inited. 21:47:08.521 pool-1-thread-1 INFO AbstractService: Service: CLIService is inited. 21:47:08.522 pool-1-thread-1 INFO AbstractService: Service:ThriftBinaryCLIService is inited. 21:47:08.522 pool-1-thread-1 INFO AbstractService: Service: HiveServer2 is inited. 21:47:08.523 pool-1-thread-1 INFO AbstractService: Service:OperationManager is started. 21:47:08.523 pool-1-thread-1 INFO AbstractService: Service:SessionManager is started. 21:47:08.524 pool-1-thread-1 INFO AbstractService: Service: CLIService is started. 21:47:08.524 pool-1-thread-1 INFO AbstractService: Service:ThriftBinaryCLIService is started. 21:47:08.527 pool-1-thread-1 DEBUG HiveAuthFactory: Cannot find private field "keytabFile" in class: org.apache.hadoop.security.UserGroupInformation java.lang.NoSuchFieldException: keytabFile at java.lang.Class.getDeclaredField(Class.java:2070) at org.apache.hive.service.auth.HiveAuthFactory.<init>(HiveAuthFactory.java:105) at org.apache.hive.service.cli.thrift.ThriftBinaryCLIService.initializeServer(ThriftBinaryCLIService.java:59) at org.apache.hive.service.cli.thrift.ThriftCLIService.start(ThriftCLIService.java:179) at org.apache.hive.service.CompositeService.start(CompositeService.java:70) at org.apache.hive.service.server.HiveServer2.start(HiveServer2.java:105) at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2.start(HiveThriftServer2.scala:154) at org.apache.spark.sql.hive.thriftserver.HiveThriftServer2$.startWithContext(HiveThriftServer2.scala:64) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.startThriftServer(SharedThriftServer.scala:107) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.$anonfun$beforeAll$1(SharedThriftServer.scala:52) at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) at scala.util.Try$.apply(Try.scala:213) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.beforeAll(SharedThriftServer.scala:52) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.beforeAll$(SharedThriftServer.scala:49) at org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite.beforeAll(ThriftServerWithSparkContextSuite.scala:51) at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212) at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210) at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208) at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:59) at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317) at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510) at sbt.ForkMain$Run$2.call(ForkMain.java:296) at sbt.ForkMain$Run$2.call(ForkMain.java:286) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) 21:47:08.568 pool-1-thread-1 INFO ThriftCLIService: Starting ThriftBinaryCLIService on port 41409 with 5...500 worker threads 21:47:08.568 pool-1-thread-1 INFO AbstractService: Service:HiveServer2 is started.
21:47:08.568 pool-1-thread-1 INFO HiveThriftServer2: HiveThriftServer2 started 21:47:08.574 pool-1-thread-1 INFO ThriftServerWithSparkContextInBinarySuite: Started HiveThriftServer2: mode=binary, port=41409, attempt=0 21:47:08.583 pool-1-thread-1 DEBUG IsolatedClientLoader: hive class: org.apache.derby.jdbc.AutoloadedDriver - jar:file:/home/sparkivy/per-executor-caches/0/.ivy2/cache/org.apache.derby/derby/jars/derby-10.12.1.1.jar!/org/apache/derby/jdbc/AutoloadedDriver.class 21:47:08.583 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: java.sql.Driver 21:47:08.584 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: java.sql.SQLFeatureNotSupportedException 21:47:08.584 pool-1-thread-1 DEBUG IsolatedClientLoader: hive class: org.apache.derby.jdbc.AutoloadedDriver40 - null 21:47:08.584 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: java.sql.DriverManager 21:47:08.585 pool-1-thread-1 DEBUG IsolatedClientLoader: hive class: org.apache.hive.jdbc.HiveDriver - jar:file:/home/sparkivy/per-executor-caches/0/.ivy2/cache/org.apache.hive/hive-jdbc/jars/hive-jdbc-2.3.7.jar!/org/apache/hive/jdbc/HiveDriver.class 21:47:08.586 pool-1-thread-1 DEBUG IsolatedClientLoader: hive class: org.apache.hive.jdbc.ZooKeeperHiveClientException - jar:file:/home/sparkivy/per-executor-caches/0/.ivy2/cache/org.apache.hive/hive-jdbc/jars/hive-jdbc-2.3.7.jar!/org/apache/hive/jdbc/ZooKeeperHiveClientException.class 21:47:08.587 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: java.sql.Connection 21:47:08.587 pool-1-thread-1 DEBUG IsolatedClientLoader: hive class: org.h2.Driver - jar:file:/home/sparkivy/per-executor-caches/0/.ivy2/cache/com.h2database/h2/jars/h2-1.4.195.jar!/org/h2/Driver.class 21:47:08.588 pool-1-thread-1 DEBUG IsolatedClientLoader: hive class: org.h2.JdbcDriverBackwardsCompat - jar:file:/home/sparkivy/per-executor-caches/0/.ivy2/cache/com.h2database/h2/jars/h2-1.4.195.jar!/org/h2/JdbcDriverBackwardsCompat.class 21:47:08.589 pool-1-thread-1 DEBUG IsolatedClientLoader: hive class: org.mariadb.jdbc.Driver - jar:file:/home/sparkivy/per-executor-caches/0/.ivy2/cache/org.mariadb.jdbc/mariadb-java-client/jars/mariadb-java-client-2.5.4.jar!/org/mariadb/jdbc/Driver.class 21:47:08.590 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: java.sql.DriverAction 21:47:08.590 pool-1-thread-1 DEBUG IsolatedClientLoader: hive class: org.mariadb.jdbc.internal.util.DeRegister - jar:file:/home/sparkivy/per-executor-caches/0/.ivy2/cache/org.mariadb.jdbc/mariadb-java-client/jars/mariadb-java-client-2.5.4.jar!/org/mariadb/jdbc/internal/util/DeRegister.class 21:47:08.591 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: org.postgresql.Driver 21:47:08.596 pool-1-thread-1 DEBUG IsolatedClientLoader: hive class: com.ibm.db2.jcc.DB2Driver - jar:file:/home/sparkivy/per-executor-caches/0/.ivy2/cache/com.ibm.db2/jcc/jars/jcc-11.5.0.0.jar!/com/ibm/db2/jcc/DB2Driver.class 21:47:08.598 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: java.util.NoSuchElementException 21:47:08.600 pool-1-thread-1 DEBUG IsolatedClientLoader: hive class: com.ibm.db2.jcc.am.SqlFeatureNotSupportedException - jar:file:/home/sparkivy/per-executor-caches/0/.ivy2/cache/com.ibm.db2/jcc/jars/jcc-11.5.0.0.jar!/com/ibm/db2/jcc/am/SqlFeatureNotSupportedException.class 21:47:08.600 pool-1-thread-1 DEBUG IsolatedClientLoader: hive class: com.ibm.db2.jcc.am.bq - jar:file:/home/sparkivy/per-executor-caches/0/.ivy2/cache/com.ibm.db2/jcc/jars/jcc-11.5.0.0.jar!/com/ibm/db2/jcc/am/bq.class 21:47:08.601 pool-1-thread-1 DEBUG 
IsolatedClientLoader: hive class: com.ibm.db2.jcc.DB2Diagnosable - jar:file:/home/sparkivy/per-executor-caches/0/.ivy2/cache/com.ibm.db2/jcc/jars/jcc-11.5.0.0.jar!/com/ibm/db2/jcc/DB2Diagnosable.class 21:47:08.603 pool-1-thread-1 DEBUG IsolatedClientLoader: hive class: com.ibm.db2.jcc.DB2BaseDataSource - jar:file:/home/sparkivy/per-executor-caches/0/.ivy2/cache/com.ibm.db2/jcc/jars/jcc-11.5.0.0.jar!/com/ibm/db2/jcc/DB2BaseDataSource.class 21:47:08.607 pool-1-thread-1 DEBUG IsolatedClientLoader: hive class: com.ibm.db2.jcc.DB2JccDataSource - jar:file:/home/sparkivy/per-executor-caches/0/.ivy2/cache/com.ibm.db2/jcc/jars/jcc-11.5.0.0.jar!/com/ibm/db2/jcc/DB2JccDataSource.class 21:47:08.626 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: com.microsoft.sqlserver.jdbc.SQLServerDriver 21:47:08.801 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:41409 21:47:08.801 pool-1-thread-1 INFO Utils: Resolved authority: localhost:41409 21:47:08.806 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@514f9041 21:47:08.807 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:08.808 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:08.808 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:08.809 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:08.809 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:08.809 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:08.817 HiveServer2-Handler-Pool: Thread-180 DEBUG TSaslServerTransport: transport map does not contain key 21:47:08.817 HiveServer2-Handler-Pool: Thread-180 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@31b4818b 21:47:08.818 HiveServer2-Handler-Pool: Thread-180 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:08.818 HiveServer2-Handler-Pool: Thread-180 DEBUG TSaslServerTransport: Received start message with status START 21:47:08.818 HiveServer2-Handler-Pool: Thread-180 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:08.820 HiveServer2-Handler-Pool: Thread-180 DEBUG TSaslTransport: SERVER: Start message handled 21:47:08.820 HiveServer2-Handler-Pool: Thread-180 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:08.821 HiveServer2-Handler-Pool: Thread-180 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:08.821 HiveServer2-Handler-Pool: Thread-180 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:08.821 HiveServer2-Handler-Pool: Thread-180 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@6a1f1288 21:47:08.821 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:08.828 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:08.829 HiveServer2-Handler-Pool: Thread-180 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:08.831 HiveServer2-Handler-Pool: Thread-180 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:08.834 HiveServer2-Handler-Pool: Thread-180 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:08.834 HiveServer2-Handler-Pool: Thread-180 
DEBUG ThriftCLIService: Client's username: jenkins 21:47:08.834 HiveServer2-Handler-Pool: Thread-180 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:08.884 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.884 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.886 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.886 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.887 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.898 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.899 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.903 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.904 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.904 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.904 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.904 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.905 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.905 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.905 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.906 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.937 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.938 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.938 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.938 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.939 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.939 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.939 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf 
not found or Session not initiated, use thread based class loader instead 21:47:08.940 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.941 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.941 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.941 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.942 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.942 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.943 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.943 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.944 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.945 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.945 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.947 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.955 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.956 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.957 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.958 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.958 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.959 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.959 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.966 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.966 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.967 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.967 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 
21:47:08.968 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.968 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.969 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.969 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.969 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.974 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.979 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.979 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.980 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.980 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.981 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.981 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.982 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.982 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.988 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.989 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.989 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.990 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.990 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.990 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.991 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.991 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:08.992 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.003 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not 
found or Session not initiated, use thread based class loader instead 21:47:09.004 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.005 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.005 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.005 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.006 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.006 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.006 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.023 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.024 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.024 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.025 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.025 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.026 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.026 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.027 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.028 HiveServer2-Handler-Pool: Thread-180 DEBUG Utilities: Hive Conf not found or Session not initiated, use thread based class loader instead 21:47:09.169 HiveServer2-Handler-Pool: Thread-180 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.<init>(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at
org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.<init>(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ...
28 more 21:47:09.172 HiveServer2-Handler-Pool: Thread-180 DEBUG TSaslTransport: writing data length: 3487 21:47:09.172 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:09.173 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:41409
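The recurring root cause in the stack trace above is the final java.lang.NoSuchMethodException: MetaStoreUtils.newInstance resolves a SessionHiveMetaStoreClient constructor by the exact parameter list (HiveConf, HiveMetaHookLoader, Boolean) through Class.getDeclaredConstructor, and no constructor with that signature is present, which typically points to a Hive version mismatch on the test classpath. The Scala sketch below only illustrates that reflective lookup and its failure mode with hypothetical stand-in classes; it is not Spark or Hive code.

    // Minimal sketch with made-up classes: getDeclaredConstructor requires an exact
    // parameter list, so asking for a signature the class does not declare raises
    // java.lang.NoSuchMethodException, as in the trace above.
    class FakeMetaStoreClient(val conf: String) // declares only a (String) constructor

    object ReflectiveNewInstance {
      def newInstance[T](cls: Class[T], paramTypes: Array[Class[_]], args: Array[AnyRef]): T = {
        val ctor = cls.getDeclaredConstructor(paramTypes: _*) // throws NoSuchMethodException if absent
        ctor.setAccessible(true)
        ctor.newInstance(args: _*)
      }

      def main(argv: Array[String]): Unit = {
        try {
          // Request a (String, Boolean) constructor that FakeMetaStoreClient does not declare,
          // mirroring the (HiveConf, HiveMetaHookLoader, Boolean) lookup in the stack trace.
          newInstance(classOf[FakeMetaStoreClient],
            Array(classOf[String], classOf[java.lang.Boolean]),
            Array("dummy-conf", java.lang.Boolean.TRUE))
        } catch {
          case e: NoSuchMethodException => println(s"constructor lookup failed: $e")
        }
      }
    }

In the failure above, the same kind of lookup happens inside MetaStoreUtils.newInstance (MetaStoreUtils.java:1705 in the trace), so the session never obtains a metastore client and every OpenSession call fails.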
[The same sequence then repeats for each reconnection attempt: a PLAIN SASL handshake, an OpenSession call (protocol HIVE_CLI_SERVICE_PROTOCOL_V10, user jenkins, 127.0.0.1), the identical "Failed to setup metastore connection" stack trace, and "WARN HiveConnection: Failed to connect to localhost:41409". Attempts start at 21:47:09.431 (Thread-182), 21:47:09.538 (Thread-183), 21:47:09.645 (Thread-184), 21:47:10.652 (Thread-187), 21:47:11.659 (Thread-188), 21:47:12.668 (Thread-189), 21:47:13.676 (Thread-190), 21:47:14.682 (Thread-191), and 21:47:15.688 (Thread-192).]
    ... 28 more
21:47:15.695 HiveServer2-Handler-Pool: Thread-192 DEBUG TSaslTransport: writing data length: 3487
21:47:15.695 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487
21:47:15.695 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:41409
21:47:24.759 HiveServer2-Handler-Pool: Thread-201 DEBUG TSaslTransport: writing data length: 3487
21:47:24.759 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487
21:47:24.759 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:41409
[The identical one-second retry cycle (open transport, SASL PLAIN handshake, OpenSession, the same HiveSQLException: Failed to setup metastore connection with the same NoSuchMethodException root cause, then WARN HiveConnection: Failed to connect to localhost:41409) repeats for the attempts started at 21:47:25.760, 21:47:26.767, 21:47:27.771, 21:47:28.777, 21:47:29.784, 21:47:30.788, 21:47:31.793, 21:47:32.800 and 21:47:33.808, served by HiveServer2-Handler-Pool threads 202 through 210.]
28 more 21:47:38.846 HiveServer2-Handler-Pool: Thread-215 DEBUG TSaslTransport: writing data length: 3487 21:47:38.846 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:38.847 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:41409 21:47:38.851 pool-1-thread-1 ERROR ThriftServerWithSparkContextInBinarySuite: Error start hive server with Context org.scalatest.exceptions.TestFailedDueToTimeoutException: The code passed to eventually never returned normally. Attempted 34 times over 30.267685124000003 seconds. Last failure message: Could not open client transport with JDBC Uri: jdbc:hive2://localhost:41409/: Failed to setup metastore connection. at org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:432) at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:439) at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:391) at org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite.eventually(ThriftServerWithSparkContextSuite.scala:51) at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:308) at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:307) at org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite.eventually(ThriftServerWithSparkContextSuite.scala:51) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.startThriftServer(SharedThriftServer.scala:121) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.$anonfun$beforeAll$1(SharedThriftServer.scala:52) at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) at scala.util.Try$.apply(Try.scala:213) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.beforeAll(SharedThriftServer.scala:52) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.beforeAll$(SharedThriftServer.scala:49) at org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite.beforeAll(ThriftServerWithSparkContextSuite.scala:51) at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212) at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210) at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208) at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:59) at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317) at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510) at sbt.ForkMain$Run$2.call(ForkMain.java:296) at sbt.ForkMain$Run$2.call(ForkMain.java:286) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: java.sql.SQLException: Could not open client transport with JDBC Uri: jdbc:hive2://localhost:41409/: Failed to setup metastore connection at org.apache.hive.jdbc.HiveConnection.(HiveConnection.java:224) at org.apache.hive.jdbc.HiveDriver.connect(HiveDriver.java:107) at java.sql.DriverManager.getConnection(DriverManager.java:664) at java.sql.DriverManager.getConnection(DriverManager.java:247) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.$anonfun$withJdbcStatement$2(SharedThriftServer.scala:83) at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238) at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36) at 
scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33) at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:38) at scala.collection.TraversableLike.map(TraversableLike.scala:238) at scala.collection.TraversableLike.map$(TraversableLike.scala:231) at scala.collection.AbstractTraversable.map(Traversable.scala:108) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.withJdbcStatement(SharedThriftServer.scala:83) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.withJdbcStatement$(SharedThriftServer.scala:79) at org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite.withJdbcStatement(ThriftServerWithSparkContextSuite.scala:51) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.$anonfun$startThriftServer$4(SharedThriftServer.scala:122) at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) at org.scalatest.concurrent.Eventually.makeAValiantAttempt$1(Eventually.scala:395) at org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:409) ... 25 more Caused by: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.jdbc.Utils.verifySuccess(Utils.java:267) at org.apache.hive.jdbc.Utils.verifySuccess(Utils.java:258) at org.apache.hive.jdbc.HiveConnection.openSession(HiveConnection.java:683) at org.apache.hive.jdbc.HiveConnection.(HiveConnection.java:200) ... 43 more Caused by: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) ... 3 more Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 
14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 28 more 21:47:38.852 pool-1-thread-1 INFO HiveServer2: Shutting down HiveServer2 21:47:38.852 pool-1-thread-1 INFO ThriftCLIService: Thrift server has stopped 21:47:38.852 pool-1-thread-1 INFO AbstractService: Service:ThriftBinaryCLIService is stopped. 21:47:38.852 pool-1-thread-1 INFO AbstractService: Service:OperationManager is stopped. 21:47:38.852 pool-1-thread-1 INFO AbstractService: Service:SessionManager is stopped. 21:47:38.853 pool-1-thread-1 INFO AbstractService: Service:CLIService is stopped. 21:47:38.853 pool-1-thread-1 INFO AbstractService: Service:HiveServer2 is stopped. 
21:47:38.854 pool-1-thread-1 INFO ThriftServerWithSparkContextInBinarySuite: Trying to start HiveThriftServer2: mode=binary, attempt=1 21:47:38.856 pool-1-thread-1 INFO HiveUtils: Initializing execution hive, version 2.3.7 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.service.shutdown.timeout=30s 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.amlauncher.thread-count=50 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.enabled=false 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.connection.maximum=15 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.numa-awareness.numactl.cmd=/usr/bin/numactl 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.scheduler.heartbeat.interval-ms=1000 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.timeline-client.number-of-async-entities-to-merge=10 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.timeout=60 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.kerberos.principal=HTTP/_HOST@LOCALHOST 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.loadedjob.tasks.max=-1 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.framework.name=local 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.uploader.server.thread-count=50 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.nonsecure-mode.user-pattern=^[_.A-Za-z0-9][-@_.A-Za-z0-9]{0,255}?[$]?$ 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:tfile.fs.output.buffer.size=262144 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.job.task.listener.thread-count=30 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.port.maxRetries=100 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.cache.background.reload.threads=3 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.cross-origin.enabled=false 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.ftp.impl=org.apache.hadoop.fs.ftp.FtpFs 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying 
Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.secure=false 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.shell.safely.delete.limit.num.files=100 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:dfs.bytes-per-checksum=512 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.acl-view-job= 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.background.sleep=25ms 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.retry.limit=${fs.s3a.attempts.maximum} 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.loadedjobs.cache.size=5 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.table.create=false 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.enabled=false 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.with-user-dir=false 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.input.fileinputformat.split.minsize=0 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.container.liveness-monitor.interval-ms=600000 21:47:38.889 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.client.thread-count=50 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.seqfile.compress.blocksize=1000000 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.checksum.algo.impl=org.apache.hadoop.yarn.sharedcache.ChecksumSHA256Impl 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.interceptor-class.pipeline=org.apache.hadoop.yarn.server.nodemanager.amrmproxy.DefaultRequestInterceptor 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.leveldb-cache-read-cache-size=10485760 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.fetch.retry.interval-ms=1000 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.sql.warehouse.dir=file:/home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-2efb422e-11e4-4316-be48-e21be59c3ab1/metastore 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.maps=0-2 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.include-port-in-node-name=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark 
and extra properties to Hive Conf:yarn.nodemanager.admin-env=MALLOC_ARENA_MAX=$MALLOC_ARENA_MAX 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.node-removal-untracked.timeout-ms=60000 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.am.max-attempts=2 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.failover.sleep.base.millis=100 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.https.address=0.0.0.0:19890 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.node-labels.fs-store.impl.class=org.apache.hadoop.yarn.nodelabels.FileSystemNodeLabelsStore 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.collector-service.address=${yarn.nodemanager.hostname}:8048 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.trash.checkpoint.interval=0 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.rdbms.datastoreAdapterClassName=org.datanucleus.store.rdbms.adapter.DerbyAdapter 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.map.output.collector.class=org.apache.hadoop.mapred.MapTask$MapOutputBuffer 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.node-ip-cache.expiry-interval-secs=-1 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.signature.secret.file=*********(redacted) 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.jetty.logs.serve.aliases=true 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.handler=disabled 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.handler-thread-count=10 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.max-completed-applications=1000 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.system-metrics-publisher.enabled=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.algorithm.class=org.apache.hadoop.yarn.server.resourcemanager.scheduler.constraint.algorithm.DefaultPlacementAlgorithm 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.webapp.address=0.0.0.0:8788 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation.token.renew-interval=*********(redacted) 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying 
Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.nm.uploader.replication.factor=10 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.identifierFactory=datanucleus1 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.negative-cache.secs=30 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.task.container.log.backups=0 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.skip.proc-count.auto-incr=true 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.posix.attr.gid.name=gidNumber 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.driver.host=amp-jenkins-worker-04.amp 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.fallback-to-simple-auth-allowed=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.memory.enforced=true 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.DetachAllOnCommit=true 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.failover-proxy-provider=org.apache.hadoop.yarn.client.ConfiguredRMFailoverProxyProvider 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.http-authentication.simple.anonymous.allowed=true 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.health-monitor.check-interval.ms=1000 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.acl.reservation-enable=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.store.class=org.apache.hadoop.yarn.server.resourcemanager.recovery.FileSystemRMStateStore 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.hard-kill-timeout-ms=10000 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.etag.checksum.enabled=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-metrics.enable=true 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.fd-clean-interval-secs=60 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nodemanagers.heartbeat-interval-ms=1000 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.common.configuration.version=3.0.0 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:fs.s3a.s3guard.ddb.table.capacity.read=500 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.remote-app-log-dir-suffix=logs 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.windows-container.cpu-limit.enabled=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.privileged-containers.allowed=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.blocksize=67108864 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.retry.ceiling.ms=60000 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.leveldb-store.path=${hadoop.tmp.dir}/yarn/system/confstore 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.store.in-memory.initial-delay-mins=10 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.principal=jhs/_HOST@REALM.TLD 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.skip.proc-count.auto-incr=true 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.name=file 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.reduces=0-2 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.zk.num-retries=1000 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.webapp.xfs-filter.enabled=true 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:seq.io.sort.mb=100 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.max.version=100 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.https.address=${yarn.timeline-service.hostname}:8190 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.NonTransactionalRead=true 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.scheduler.address=${yarn.resourcemanager.hostname}:8030 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.node-labels.enabled=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.ui-actions.enabled=true 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.timeout=600000 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.client-server.thread-count=50 
21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.shell.command.timeout=0s 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.crypto.cipher.suite=AES/CTR/NoPadding 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.elastic-memory-control.oom-handler=org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.resources.DefaultOOMHandler 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.connect.max-wait.ms=900000 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.defaultFS=file:/// 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.minicluster.use-rpc=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.transactionIsolation=read-committed 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.har.impl.disable.cache=true 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.webapp.ui2.enable=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.compression.codec.bzip2.library=system-native 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.distributed-scheduling.enabled=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.connection-keep-alive.timeout=5 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.https.address=${yarn.resourcemanager.hostname}:8090 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.address=0.0.0.0:10020 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-tokens.master-key-rolling-interval-secs=*********(redacted) 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.is.minicluster=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.address=${yarn.nodemanager.hostname}:0 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.abfss.impl=org.apache.hadoop.fs.azurebfs.SecureAzureBlobFileSystem 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.s3a.impl=org.apache.hadoop.fs.s3a.S3A 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.combine.progress.records=10000 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.epoch.range=0 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra 
properties to Hive Conf:yarn.resourcemanager.am.max-attempts=2 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.schema.validateTables=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.cgroups.hierarchy=/hadoop-yarn 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.wasbs.impl=org.apache.hadoop.fs.azure.Wasbs 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.cache-store-class=org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.transfer.mode=BLOCK_TRANSFER_MODE 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.start.cleanup.scratchdir=true 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.server.log.slow.rpc=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.cache.level2=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.node-labels.provider.fetch-interval-ms=1800000 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.webapp.https.address=0.0.0.0:8091 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.testing=true 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.cross-origin.enabled=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.wasb.impl=org.apache.hadoop.fs.azure.NativeAzureFileSystem 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.auto-update.containers=false 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.job.committer.cancel-timeout=60000 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.zk-store.parent-path=/confstore 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.default-container-executor.log-dirs.permissions=710 21:47:38.890 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.attempt.diagnostics.limit.kc=64 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.bytes-per-checksum=512 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.memory-mb=-1 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.driver.port=38658 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra 
properties to Hive Conf:fs.AbstractFileSystem.abfs.impl=org.apache.hadoop.fs.azurebfs.Abfs 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.writer.flush-interval-seconds=60 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.fast.upload.active.blocks=4 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.credential.clear-text-fallback=true 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.collector-service.thread-count=5 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.secure.mode=false 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.joblist.cache.size=20000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.pre.event.listeners= 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.host=0.0.0.0 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.fs.state-store.num-retries=0 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nodemanager-connect-retries=10 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-aggregation.num-log-files-per-app=30 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.encrypted.key.cache.low-watermark=0.3f 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.magic.enabled=false 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.max-retries=30 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:dfs.ha.fencing.ssh.connect-timeout=30000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation-enable=false 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.system-metrics-publisher.enabled=false 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.markreset.buffer.percent=0.0 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.viewfs.impl=org.apache.hadoop.fs.viewfs.ViewFs 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.io.sort.factor=10 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.client.thread-count=25 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.failover-controller.new-active.rpc-timeout.ms=60000 
21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-localizer.java.opts=-Xmx256m 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.datestring.cache.size=200000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.acl-modify-job= 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.windows-container.memory-limit.enabled=false 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.address=${yarn.timeline-service.hostname}:8188 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.PersistenceManagerFactoryClass=org.datanucleus.api.jdo.JDOPersistenceManagerFactory 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.job.committer.commit-window=10000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-manager.thread-count=20 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.minicluster.fixed.ports=false 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.tags.system=YARN,HDFS,NAMENODE,DATANODE,REQUIRED,SECURITY,KERBEROS,PERFORMANCE,CLIENT ,SERVER,DEBUG,DEPRECATED,COMMON,OPTIONAL 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.integral.jdo.pushdown=false 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.cluster.max-application-priority=0 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.ttl-enable=true 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.recovery.store.fs.uri=${hadoop.tmp.dir}/mapred/history/recoverystore 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.caller.context.signature.max.size=40 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.load.resource-types.from-server=false 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.zookeeper.session-timeout.ms=10000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.hadoop.fs.file.impl=org.apache.spark.DebugFilesystem 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:tfile.io.chunk.size=1048576 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.table.capacity.write=100 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.slowtaskthreshold=1.0 21:47:38.891 pool-1-thread-1 DEBUG 
HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization, org.apache.hadoop.io.serializer.avro.AvroReflectSerialization 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.failover.sleep.max.millis=2000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.directory.search.timeout=10000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.warehouse.dir=file:/home/jenkins/workspace/NewSparkPullRequestBuilder/sql/hive-thriftserver/spark-warehouse/org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.store.max-logs=1000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-attributes.provider.fetch-interval-ms=600000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.swift.impl=org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.local-cache.max-files-per-directory=8192 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.enabled=false 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.zk.acl=world:anyone:rwcda 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.sort.spill.percent=0.80 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.scan-interval-seconds=60 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.node-attribute.fs-store.impl.class=org.apache.hadoop.yarn.server.resourcemanager.nodelabels.FileSystemNodeAttributeStore 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.retry.interval=500ms 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.best-effort=false 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled=*********(redacted) 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.posix.attr.uid.name=uidNumber 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.swebhdfs.impl=org.apache.hadoop.fs.SWebHdfs 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.elastic-memory-control.timeout-sec=5 21:47:38.891 pool-1-thread-1 DEBUG 
HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.ifile.readahead=true 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-timeline-store.ttl-interval-ms=300000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.reader.webapp.address=${yarn.timeline-service.webapp.address} 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.algorithm.pool-size=1 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.hbase.coprocessor.jar.hdfs.location=/hbase/coprocessor/hadoop-yarn-server-timelineservice.jar 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.encrypted.key.cache.num.refill.threads=2 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.scheduler.class=org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.command-opts=-Xmx1024m 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.cluster.local.dir=${hadoop.tmp.dir}/mapred/local 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.mapfile.bloom.error.rate=0.005 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.client.resolve.topology.enabled=false 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.allowed-runtimes=default 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.store.class=org.apache.hadoop.yarn.server.sharedcachemanager.store.InMemorySCMStore 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.failover-controller.graceful-fence.rpc-timeout.ms=5000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.replication=3 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.uid.cache.secs=14400 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.maxtaskfailures.per.tracker=3 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.metadatastore.impl=org.apache.hadoop.fs.s3a.s3guard.NullMetadataStore 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.skip.checksum.errors=false 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.file.impl=org.apache.spark.DebugFilesystem 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.app.mapreduce.client-am.ipc.max-retries-on-timeouts=3 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.xfs-filter.xframe-options=SAMEORIGIN 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.connection.timeout=200000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.max.split.locations=15 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.max-queue-length=15 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.session.timeout.ms=60000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.cache-ttl.secs=300 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jvm.system-properties-to-log=os.name,os.version,java.home,java.runtime.version,java.vendor,java.version,java.vm.name,java.class.path,java.io.tmpdir,user.dir,user.name 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.opportunistic-container-allocation.nodes-used=10 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.active-dir=/tmp/entity-file-history/active 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.transfer.buffer.size=131072 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.retry-interval-ms=1000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.http.policy=HTTP_ONLY 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.memory.debugFill=true 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.socket.send.buffer=8192 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.abfss.impl=org.apache.hadoop.fs.azurebfs.Abfss 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.uploader.server.address=0.0.0.0:8046 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation-token.max-conf-size-bytes=*********(redacted) 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.token.validity=*********(redacted) 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.max.connections=0 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.minicluster.yarn.nodemanager.resource.memory-mb=4096 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties 
to Hive Conf:mapreduce.job.emit-timeline-data=false 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.system-reserved-memory-mb=-1 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.kerberos.min.seconds.before.relogin=60 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.move.thread-count=3 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.admin.client.thread-count=1 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.dispatcher.drain-events.timeout=300000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.buffer.dir=${hadoop.tmp.dir}/s3a 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.enabled.protocols=TLSv1,SSLv2Hello,TLSv1.1,TLSv1.2 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.admin.address=0.0.0.0:10033 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation-status.time-out.ms=600000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.uris= 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.assumed.role.sts.endpoint.region=us-west-1 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.port=13562 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.max-log-aggregation-diagnostics-in-memory=10 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.health-checker.interval-ms=600000 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.in.test=true 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.clientrm.interceptor-class.pipeline=org.apache.hadoop.yarn.server.router.clientrm.DefaultClientRequestInterceptor 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.zk-appid-node.split-index=0 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.blocksize=67108864 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.sandbox-mode.local-dirs.permissions=read 21:47:38.891 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.rmadmin.interceptor-class.pipeline=org.apache.hadoop.yarn.server.router.rmadmin.DefaultRMAdminRequestInterceptor 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-container-debug-info.enabled=true 21:47:38.892 pool-1-thread-1 
DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.max-cached-nodemanagers-proxies=0 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.cgroups.delete-delay-ms=20 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.delete.debug-delay-sec=0 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.pmem-check-enabled=true 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-health-checker.max-disk-utilization-per-disk-percentage=90.0 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.app-submission.cross-platform=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.work-preserving-recovery.scheduling-wait-ms=10000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-retry-minimum-interval-ms=1000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.cache.secs=300 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.enabled=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.local.sas.key.mode=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.maximum.data.length=67108864 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.max.threads=0 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.pipeline.cache-max-size=25 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.load-comparator=QUEUE_LENGTH 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.authorization=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.complete.cancel.delegation.tokens=*********(redacted) 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.ui.enabled=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.paging.maximum=5000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:nfs.exports.allowed.hosts=* rw 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.ha.enable=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.http.policy=HTTP_ONLY 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.sharedcache.store.in-memory.check-period-mins=720 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.ssl=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.app.name=test-sql-context 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.rdbms.useLegacyNativeValueStrategy=true 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.application-client-protocol.poll-interval-ms=200 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.leveldb-store.compaction-interval-secs=86400 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.writer.class=org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.zookeeper.parent-znode=/hadoop-ha 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.server2.transport.mode=binary 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-aggregation.policy.class=org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AllContainerLogAggregationPolicy 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.merge.percent=0.66 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.sql.codegen.fallback=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.filter.group=(objectClass=group) 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.scheduler.pool-size=1 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.schema.verification=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resourcemanager.minimum.version=NONE 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.speculative-cap-running-tasks=0.1 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.admin.acl=* 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.recovery.supervised=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.admin.thread-count=1 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.ha.automatic-failover.enabled=true 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:mapreduce.reduce.skip.maxgroups=0 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.connect.timeout=180000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.address=${yarn.resourcemanager.hostname}:8032 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.ping=true 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.local-fs.write-limit.bytes=-1 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.adl.oauth2.access.token.provider.type=*********(redacted) 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.ssl.file.buffer.size=65536 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.ha.automatic-failover.embedded=true 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.gpu.docker-plugin=nvidia-docker-v1 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.enabled=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multipart.purge=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.store.class=file 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.queue-limit-stdev=1.0f 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.end-notification.max.attempts=5 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.output.fileoutputformat.compress.codec=org.apache.hadoop.io.compress.DefaultCodec 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-monitor.procfs-tree.smaps-based-rss.enabled=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.bind.wildcard.addr=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.rest-csrf.enabled=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.health-monitor.connect-retry-interval.ms=1000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.keytab=/etc/krb5.keytab 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.keytab=/etc/security/keytab/jhs.service.keytab 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.threads.max=10 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to 
Hive Conf:mapreduce.reduce.shuffle.input.buffer.percent=0.70 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.allowed-container-networks=host,none,bridge 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-labels.resync-interval-ms=120000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.tmp.dir=/tmp/hadoop-${user.name} 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.maps=2 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.rest-csrf.custom-header=X-XSRF-Header 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.end-notification.max.retry.interval=5000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation.retain-check-interval-seconds=-1 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.resource-tracker.client.thread-count=50 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.rm.system-metrics-publisher.emit-container-events=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-timeline-store.start-time-read-cache-size=10000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.ha.automatic-failover.zk-base-path=/yarn-leader-election 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.seqfile.local.dir=${hadoop.tmp.dir}/io/local 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.throttle.retry.interval=100ms 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.wasb.impl=org.apache.hadoop.fs.azure.Wasb 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.schema.validateConstraints=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.submit.file.replication=10 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.minicluster.fixed.ports=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multipart.threshold=2147483647 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.xfs-filter.xframe-options=SAMEORIGIN 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.done-dir=${yarn.app.mapreduce.am.staging-dir}/history/done 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:ipc.client.idlethreshold=4000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.cgroups.strict-resource-usage=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.input.buffer.percent=0.0 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.userremapping-gid-threshold=1 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.rest-csrf.enabled=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.host.port=21 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.ping.interval=60000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.end.function.listeners= 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.history-writer.multi-threaded-dispatcher.pool-size=10 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.admin.address=${yarn.resourcemanager.hostname}:8033 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.client-write-packet-size=65536 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.kill.max=10 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.speculative=true 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.key.default.bitlength=128 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reducer.unconditional-preempt.delay.sec=300 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-health-checker.interval-ms=120000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log.deletion-threads-count=4 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.webapp.filter-entity-list-by-user=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connection.maxidletime=10000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.io.sort.mb=100 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.localizer.client.thread-count=5 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.erasurecode.codec.rs.rawcoders=rs_native,rs_java 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.erasurecode.codec.rs-legacy.rawcoders=rs-legacy_java 21:47:38.892 
pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.admin.address=0.0.0.0:8047 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.algorithm.iterator=SERIAL 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.sql.testkey=true 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.connectionPoolingType=BONECP 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.localizer.cache.cleanup.interval-ms=600000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.crypto.codec.classes.aes.ctr.nopadding=org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec, org.apache.hadoop.crypto.JceAesCtrCryptoCodec 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.cache.limit.max-resources-mb=0 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.connection.ssl.enabled=true 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.process-kill-wait.ms=5000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.hdfs-servers=${fs.defaultFS} 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.workaround.non.threadsafe.getpwuid=true 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.df.interval=60000 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multiobjectdelete.enable=true 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.cleaner.resource-sleep-ms=0 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-health-checker.min-healthy-disks=0.25 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.shell.missing.defaultFs.warning=false 21:47:38.892 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.file.buffer.size=65536 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.attr.member=member 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.random.device.file.path=/dev/urandom 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.sensitive-config-keys=*********(redacted) 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.max.retries=9 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.intermediate-data-encryption.enable=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.connect.retry-interval.ms=30000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container.stderr.pattern={*stderr*,*STDERR*} 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.minimum-allocation-mb=1024 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.staging-dir=/tmp/hadoop-yarn/staging 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.read.timeout=180000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.max-age=1800 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.erasurecode.codec.xor.rawcoders=xor_native,xor_java 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.connection.establish.timeout=5000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.running.map.limit=0 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.minicluster.control-resource-monitoring=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.require.client.cert=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.kerberos.kinit.command=kinit 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.state-store.class=org.apache.hadoop.yarn.server.federation.store.impl.MemoryFederationStateStore 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.ui.showConsoleProgress=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.log.level=INFO 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.dns.log-slow-lookups.threshold.ms=1000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.ubertask.enable=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:adl.http.timeout=-1 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.retry-attempts=3 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.caller.context.enabled=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.nodemanager.vmem-pmem-ratio=2.1 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.rpc.protection=authentication 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.health-monitor.rpc-timeout.ms=45000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.remote-app-log-dir=/tmp/logs 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.zk.timeout-ms=10000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.cli.prune.age=86400000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.pcores-vcores-multiplier=1.0 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.sandbox-mode=disabled 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.containerlauncher.threadpool-initial-size=10 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.master.rest.enabled=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.threads=8 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.zk.retry-interval-ms=1000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.crypto.buffer.size=8192 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-labels.provider.fetch-interval-ms=600000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.recovery.store.leveldb.path=${hadoop.tmp.dir}/mapred/history/recoverystore 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.failover-retries-on-socket-timeouts=0 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.memory.enabled=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.authorization.caching.enable=true 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.instrumentation.requires.admin=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.delete.thread-count=4 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.finish-when-all-reducers-done=true 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.jaas.context=Client 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.timeline-service.leveldb-timeline-store.path=${hadoop.tmp.dir}/yarn/timeline 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.ConnectionUserName=APP 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.map.index.interval=128 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.max-queue-wait-time-ms=100 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.abfs.impl=org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.counters.max=120 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.rest-csrf.enabled=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.store-class=org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.move.interval-ms=180000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.localizer.fetch.thread-count=4 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.scheduler.client.thread-count=50 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.hostname.verifier=DEFAULT 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-state-store.path=${hadoop.tmp.dir}/yarn/timeline 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.classloader=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.map.params=${mapreduce.task.profile.params} 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connect.timeout=20000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.auth_to_local.mechanism=hadoop 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.app-collector.linger-period.ms=60000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nm.liveness-monitor.expiry-interval-ms=600000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.reservation-system.planfollower.time-step=1000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.enable-userremapping.allowed=true 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.webapp.api-service.enable=false 
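The repeated "Applying Hadoop/Hive/Spark and extra properties to Hive Conf:key=value" entries show HiveClientImpl copying the session's effective Hadoop, Hive, and Spark settings into the HiveConf it builds for the isolated metastore client. The sketch below is not code from this suite; it only illustrates how the same merged Hadoop configuration can be printed from a local[2] SparkSession, one key=value per line. The object name and output format are assumptions.

    import scala.collection.JavaConverters._
    import org.apache.spark.sql.SparkSession

    // Illustrative sketch only: dump the Hadoop configuration a local[2] session carries,
    // the same pool of key=value pairs that HiveClientImpl logs above.
    object DumpEffectiveHadoopConf {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .master("local[2]")   // matches spark.master=local[2] recorded later in this dump
          .appName("conf-dump")
          .getOrCreate()

        spark.sparkContext.hadoopConfiguration.iterator().asScala
          .map(entry => s"${entry.getKey}=${entry.getValue}")
          .toSeq
          .sorted
          .foreach(println)

        spark.stop()
      }
    }
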
21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.recovery.enabled=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.end-notification.retry.interval=1000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.du.interval=600000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container.stderr.tail.bytes=4096 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.read.timeout.ms=60000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.cache.warn.after.ms=5000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.bytes-per-checksum=512 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.outputcommitter.factory.scheme.s3a=org.apache.hadoop.fs.s3a.commit.S3ACommitterFactory 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.cache.background.reload=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-monitor.enabled=true 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.elastic-memory-control.enabled=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:net.topology.script.number.args=100 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.merge.progress.records=10000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.localizer.address=${yarn.nodemanager.hostname}:8040 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.keytab=/etc/krb5.keytab 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.fetch.retry.timeout-ms=30000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.rm.container-allocation.expiry-interval-ms=600000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.fileoutputcommitter.algorithm.version=2 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.work-preserving-recovery.enabled=true 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.skip.maxrecords=0 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.sharedcache.root-dir=/sharedcache 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.retry.throttle.limit=${fs.s3a.attempts.maximum} 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.server2.thrift.http.port=0 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.type=simple 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.cache.limit.max-resources=0 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.userlog.limit.kb=0 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.scheduler.monitor.enable=false 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connect.max.retries=10 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.retry.times=5 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-monitor.interval-ms=3000 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.unsafe.exceptionOnMemoryLeak=true 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.gpu.allowed-gpu-devices=auto 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.executor.id=driver 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.sharedcache.mode=disabled 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.rest-csrf.custom-header=X-XSRF-Header 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.listen.queue.size=128 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.mutation.acl-policy.class=org.apache.hadoop.yarn.server.resourcemanager.scheduler.DefaultConfigurationMutationACLPolicy 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.cpu.vcores=1 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation.file-formats=TFile 21:47:38.893 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.fd-retain-secs=300 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.user.group.static.mapping.overrides=dr.who=; 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.schema.validateColumns=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.sas.expiry.period=90d 21:47:38.894 
pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.recovery.store.class=org.apache.hadoop.mapreduce.v2.hs.HistoryServerFileSystemStateStoreService 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.fail-fast=${yarn.fail-fast} 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.proxy-user-privileges.enabled=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.webapp.interceptor-class.pipeline=org.apache.hadoop.yarn.server.router.webapp.DefaultRequestInterceptorREST 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.memory.cgroups.soft-limit-percentage=90.0 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reducer.preempt.delay.sec=0 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.util.hash.type=murmur 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.storeManagerType=rdbms 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-validator=basic 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.client.job.max-retries=3 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.retry-delay.max.ms=60000 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.connection.timeout.ms=60000 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.params=-agentlib:hprof=cpu=samples,heap=sites,force=n,thread=y,verbose=n,file=%s 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.shuffle.log.backups=0 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.server2.thrift.port=0 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-diagnostics-maximum-size=10000 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.retry.interval.ms=1000 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.cgroups.delete-timeout-ms=1000 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.file.impl=org.apache.hadoop.fs.local.LocalFs 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-aggregation.roll-monitoring-interval-seconds=-1 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:mapreduce.jobhistory.cleaner.interval-ms=86400000 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.quorum=localhost:2181 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.output.fileoutputformat.compress=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.am-rm-tokens.master-key-rolling-interval-secs=*********(redacted) 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.assumed.role.session.duration=30m 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.conversion.rule=none 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.server.conf=ssl-server.xml 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.retry.throttle.interval=1000ms 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:seq.io.sort.factor=100 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.cleaner.initial-delay-mins=10 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.completion.pollinterval=5000 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.keystores.factory.class=org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.resource.cpu-vcores=1 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.enabled=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.capabilities=CHOWN,DAC_OVERRIDE,FSETID,FOWNER,MKNOD,NET_RAW,SETGID,SETUID,SETFCAP,SETPCAP,NET_BIND_SERVICE,SYS_CHROOT,KILL,AUDIT_WRITE 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.acl.enable=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.done-dir=/tmp/entity-file-history/done/ 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.fs.state-store.uri=${hadoop.tmp.dir}/yarn/system/rmstore 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.always-scan-user-dir=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.opportunistic-containers-use-pause-for-preemption=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.nodemanager.linux-container-executor.nonsecure-mode.local-user=nobody 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.sql.optimizer.excludedRules=org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.reader.class=org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.configuration.provider-class=org.apache.hadoop.yarn.LocalConfigurationProvider 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.userremapping-uid-threshold=1 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.configuration.file-system-based-store=/yarn/conf 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.cache.limit.max-single-resource-mb=0 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.stop.grace-period=10 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.resource-profiles.source-file=resource-profiles.json 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.percentage-physical-cpu-limit=100 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.client.thread-count=10 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:tfile.fs.input.buffer.size=262144 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.progressmonitor.pollinterval=1000 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-dirs=${yarn.log.dir}/userlogs 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.automatic.close=true 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.hostname=0.0.0.0 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.memory.cgroups.swappiness=0 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.stream-buffer-size=4096 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.fail-fast=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.app-aggregation-interval-secs=15 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.filter.user=(&(objectClass=user)(sAMAccountName={0})) 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: 
Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-localizer.log.level=INFO 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.address=${yarn.timeline-service.hostname}:10200 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.ubertask.maxmaps=9 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.threads.keepalivetime=60 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.files.preserve.failedtasks=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.client.job.retry-interval=2000 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.failover-controller.graceful-fence.connection.retries=1 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation.token.max-lifetime=*********(redacted) 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.drain-entities.timeout.ms=2000 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.fpga.vendor-plugin.class=org.apache.hadoop.yarn.server.nodemanager.containermanager.resourceplugin.fpga.IntelFpgaOpenclPlugin 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.summary-store=org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.cpu.vcores=1 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.encrypted-intermediate-data.buffer.kb=128 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.client.resolve.remote.symlinks=true 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.https.address=0.0.0.0:8044 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.allowed-origins=* 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.encrypted-intermediate-data=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.retain-seconds=604800 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.metrics.runtime.buckets=60,300,1440 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.timeline-service.generic-application-history.max-applications=10000 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.local-dirs=${hadoop.tmp.dir}/nm-local-dir 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.connection-keep-alive.enable=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.node-labels.configuration-type=centralized 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.path.style.access=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.aux-services.mapreduce_shuffle.class=org.apache.hadoop.mapred.ShuffleHandler 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.store.in-memory.staleness-period-mins=10080 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.adl.impl=org.apache.hadoop.fs.adl.AdlFileSystem 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nodemanager.minimum.version=NONE 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.xfs-filter.xframe-options=SAMEORIGIN 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.staging-dir.erasurecoding.enabled=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:net.topology.impl=org.apache.hadoop.net.NetworkTopology 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.map.index.skip=0 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.reader.webapp.https.address=${yarn.timeline-service.webapp.https.address} 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.data.connection.mode=ACTIVE_LOCAL_DATA_CONNECTION_MODE 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.local-fs.single-disk-limit.check.kill-limit-exceed=true 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.maximum-allocation-vcores=4 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.allowed-headers=X-Requested-With,Content-Type,Accept,Origin 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-aggregation.compression-type=none 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.version=1.0f 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.ipc.rpc.class=org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying 
Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.maxattempts=4 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.dns.log-slow-lookups.enabled=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.committer.setup.cleanup.needed=true 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.master=local[2] 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.running.reduce.limit=0 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.maximum.response.length=134217728 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.token.tracking.ids.enabled=*********(redacted) 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.caller.context.max.size=128 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.host-pid-namespace.allowed=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.delayed-removal.allowed=false 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.system.acls=sasl:yarn@, sasl:mapred@, sasl:hdfs@ 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.recovery.dir=${hadoop.tmp.dir}/yarn-nm-recovery 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.fast.upload.buffer=disk 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.intermediate-done-dir=${yarn.app.mapreduce.am.staging-dir}/history/done_intermediate 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.shuffle.log.separate=true 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.max.total.tasks=5 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.readahead.range=64K 21:47:38.894 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.rawstore.impl=org.apache.hadoop.hive.metastore.ObjectStore 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.simple.anonymous.allowed=true 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.attempts.maximum=20 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.connection.timeout.ms=15000 21:47:38.895 pool-1-thread-1 DEBUG 
HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation-token-renewer.thread-count=*********(redacted) 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.health-checker.script.timeout-ms=1200000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-timeline-store.start-time-write-cache-size=10000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.resource-profiles.enabled=false 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.hbase-schema.prefix=prod. 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.authorization=false 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.log.level=INFO 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.decommissioning-nodes-watcher.poll-interval-secs=20 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.output.fileoutputformat.compress.type=RECORD 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.leveldb-state-store.path=${hadoop.tmp.dir}/yarn/system/rmstore 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.rest-csrf.custom-header=X-XSRF-Header 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.ifile.readahead.bytes=4194304 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.app-checker.class=org.apache.hadoop.yarn.server.sharedcachemanager.RemoteAppChecker 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.nonsecure-mode.limit-users=true 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.detect-hardware-capabilities=false 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.cluster.acls.enabled=false 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.retry-after-no-speculate=1000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.group.hierarchy.levels=0 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.fs.state-store.retry-interval-ms=1000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.stream-buffer-size=4096 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.resourcemanager.application-timeouts.monitor.interval-ms=3000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.output.compress.codec=org.apache.hadoop.io.compress.DefaultCodec 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.speculative=true 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.retry-after-speculate=15000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.connectionPool.maxPoolSize=10 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.cgroups.mount=false 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.container.log.backups=0 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.log.level=INFO 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reduce.slowstart.completedmaps=0.05 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.http-authentication.type=simple 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.attr.group.name=cn 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.fpga.allowed-fpga-devices=0,1 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.internal-timers-ttl-secs=420 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.logs.enabled=true 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.block.size=32M 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.client-server.address=0.0.0.0:8045 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.logaggregation.threadpool-size-max=100 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.hostname=0.0.0.0 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation.key.update-interval=86400000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.fetch.retry.enabled=${yarn.nodemanager.recovery.enabled} 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.memory.mb=-1 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.skip.start.attempts=2 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra 
properties to Hive Conf:fs.AbstractFileSystem.hdfs.impl=org.apache.hadoop.fs.Hdfs 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-health-checker.enable=true 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.cache.level2.type=none 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.tcpnodelay=true 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.rpc-timeout.ms=0 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.low-latency=false 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.input.lineinputformat.linespermap=1 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.interceptor.user.threadpool-size=5 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.ConnectionURL=*********(redacted) 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.schema.autoCreateAll=true 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connect.max.retries.on.timeouts=45 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-timeline-store.read-cache-size=104857600 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.har.impl=org.apache.hadoop.fs.HarFs 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.split.metainfo.maxsize=10000000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.am.liveness-monitor.expiry-interval-ms=600000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.container-tokens.master-key-rolling-interval-secs=*********(redacted) 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.app-cache-size=10 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.socket.recv.buffer=8192 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.resource-tracker.address=${yarn.resourcemanager.hostname}:8031 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-labels.provider.fetch-timeout-ms=1200000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.heap.memory-mb.ratio=0.8 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying 
Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.leveldb-state-store.compaction-interval-secs=3600 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.rest-csrf.custom-header=X-XSRF-Header 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.fs.path=file://${hadoop.tmp.dir}/yarn/system/schedconf 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.output.filter=FAILED 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.filter.initializers=org.apache.hadoop.http.lib.StaticUserWebFilter 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.memory.mb=-1 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.hostname=0.0.0.0 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.ConnectionDriverName=org.apache.derby.jdbc.EmbeddedDriver 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.replication=1 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-metrics.unregister-delay-ms=10000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-metrics.period-ms=-1 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.fileoutputcommitter.task.cleanup.enabled=false 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log.retain-seconds=10800 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.plugin.pluginRegistryBundleCheck=LOG 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.cleaner-interval-seconds=3600 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.keytab=/etc/krb5.keytab 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.event.listeners= 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.providers.combined=true 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.merge.inmem.threshold=1000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.recovery.enabled=false 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.saskey.usecontainersaskeyforallaccess=true 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.nm.uploader.thread-count=20 
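The javax.jdo.option.* and datanucleus.* entries above (embedded Derby driver, APP user, auto-created schema, redacted connection URL) are the local metastore settings reaching HiveConf. Spark copies any configuration key prefixed with spark.hadoop. into this Hadoop configuration, which is one way such values end up here. The fragment below sketches that mechanism under stated assumptions: the Derby databaseName path is an illustrative placeholder rather than the redacted value in this log, and enableHiveSupport() assumes spark-hive is on the classpath.

    import org.apache.spark.sql.SparkSession

    // Sketch of the spark.hadoop.* forwarding mechanism, not this suite's own setup.
    // Each "spark.hadoop.<key>" entry is stripped of its prefix and placed in the Hadoop
    // configuration that HiveClientImpl later applies to HiveConf, as logged above.
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("embedded-metastore-sketch")
      .enableHiveSupport()
      .config("spark.hadoop.javax.jdo.option.ConnectionDriverName",
        "org.apache.derby.jdbc.EmbeddedDriver")
      .config("spark.hadoop.javax.jdo.option.ConnectionURL",
        "jdbc:derby:;databaseName=/tmp/sketch-metastore_db;create=true") // placeholder path
      .config("spark.hadoop.datanucleus.schema.autoCreateAll", "true")
      .getOrCreate()
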
21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nodemanager-graceful-decommission-timeout-secs=3600 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.ssl.enabled=false 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.hbase.coprocessor.app-final-value-retention-milliseconds=259200000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.staging.abort.pending.uploads=true 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.opportunistic-containers-max-queue-length=0 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.state-store.max-completed-applications=${yarn.resourcemanager.max-completed-applications} 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.ConnectionPassword=*********(redacted) 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.minimum-allowed-tasks=10 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.test.home=/home/jenkins/workspace/NewSparkPullRequestBuilder 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation.retain-seconds=-1 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-health-checker.min-free-space-per-disk-mb=0 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.max-age-ms=604800000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.allowed-methods=GET,POST,HEAD 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.opportunistic-container-allocation.enabled=false 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.address=0.0.0.0:19888 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.system.tags=YARN,HDFS,NAMENODE,DATANODE,REQUIRED,SECURITY,KERBEROS,PERFORMANCE,CLIENT ,SERVER,DEBUG,DEPRECATED,COMMON,OPTIONAL 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation.file-controller.TFile.class=org.apache.hadoop.yarn.logaggregation.filecontroller.tfile.LogAggregationTFileController 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.nodemanager-connect.max-wait-ms=180000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.address=${yarn.resourcemanager.hostname}:8088 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:mapreduce.jobhistory.recovery.enable=false 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.parallelcopies=5 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.webhdfs.impl=org.apache.hadoop.fs.WebHdfs 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.trash.interval=0 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.client.max-retries=3 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.authentication=simple 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.reduce.params=${mapreduce.task.profile.params} 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.resource.mb=1536 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.input.fileinputformat.list-status.num-threads=1 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-executor.class=org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.mapfile.bloom.size=1048576 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.ttl-ms=604800000 21:47:38.895 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.min-queue-length=5 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.cpu-vcores=-1 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reduces=1 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multipart.size=100M 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.minimum-allocation-vcores=1 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.speculative-cap-total-tasks=0.01 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.client.conf=ssl-client.xml 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.queuename=default 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.encrypted-intermediate-data-key-size-bits=128 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.metadatastore.authoritative=false 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.xfs-filter.xframe-options=SAMEORIGIN 
21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.health-monitor.sleep-after-disconnect.ms=1000 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.shuffle.log.limit.kb=0 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.application-client-protocol.poll-timeout-ms=-1 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.jhist.format=binary 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.rdbms.initializeColumnInfo=NONE 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.ha.enabled=false 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.staticuser.user=dr.who 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.exit.timeout.check-interval-ms=20000 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.intermediate-user-done-dir.permissions=770 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.exit.timeout=60000 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.resources-handler.class=org.apache.hadoop.yarn.server.nodemanager.util.DefaultLCEResourcesHandler 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.memory.limit.percent=0.25 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.reservation-system.enable=false 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.output.compress=false 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.zookeeper.acl=world:anyone:rwcda 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.server.max.connections=0 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.default-container-network=host 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.webapp.address=0.0.0.0:8089 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.maximum-allocation-mb=8192 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.scheduler.monitor.policies=org.apache.hadoop.yarn.server.resourcemanager.monitor.capacity.ProportionalCapacityPreemptionPolicy 
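Values recorded above, such as mapreduce.fileoutputcommitter.algorithm.version=2, hadoop.security.authentication=simple, and spark.sql.optimizer.excludedRules=org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation, can also be read back from a running session. A short sketch follows, assuming the spark value from the previous fragment; the inline comments restate what this particular run logged, not library defaults.

    // Sketch: read the effective values back (assumes the `spark` session built above).
    val hadoopConf = spark.sparkContext.hadoopConfiguration
    println(hadoopConf.get("mapreduce.fileoutputcommitter.algorithm.version")) // "2" in this run
    println(hadoopConf.get("hadoop.security.authentication"))                  // "simple" in this run
    println(spark.conf.getOption("spark.sql.optimizer.excludedRules"))
    // Some(org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation) in this run;
    // None in a session where no rule exclusion has been set.
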
21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.cleaner.period-mins=1440 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.gpu.docker-plugin.nvidia-docker-v1.endpoint=http://localhost:3476/v1.0/docker/cli 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.container.log.limit.kb=0 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connect.retry.interval=1000 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.http-cross-origin.enabled=false 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.wasbs.impl=org.apache.hadoop.fs.azure.NativeAzureFileSystem$Secure 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.subcluster-resolver.class=org.apache.hadoop.yarn.server.federation.resolver.DefaultSubClusterResolverImpl 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.zk-state-store.parent-path=/rmstore 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.cleaner.enable=true 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.fd-flush-interval-secs=10 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.encrypted.key.cache.expiry=43200000 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.nodemanager-client-async.thread-pool-max-size=500 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.maxattempts=4 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.exec.scratchdir=/home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-5af5b8f4-f1f4-4ef4-b2af-944add6f6c28 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.sorting-nodes-interval-ms=1000 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.staging.tmp.path=tmp/staging 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.sleep-delay-before-sigkill.ms=250 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.min-queue-wait-time-ms=10 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.end-notification.retry.attempts=0 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.count-logical-processors-as-cores=false 21:47:38.896 pool-1-thread-1 DEBUG 
HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.root=/registry 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:adl.feature.ownerandgroup.enableupn=false 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.zk-max-znode-size.bytes=1048576 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reduce.shuffle.consumer.plugin.class=org.apache.hadoop.mapreduce.task.reduce.Shuffle 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delayed.delegation-token.removal-interval-ms=*********(redacted) 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.localizer.cache.target-size-mb=10240 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.staging.conflict-mode=fail 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.Multithreaded=true 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.libjars.wildcard=true 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.staging.unique-filenames=true 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-attributes.provider.fetch-timeout-ms=1200000 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.list.version=2 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.client-write-packet-size=65536 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.adl.impl=org.apache.hadoop.fs.adl.Adl 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.key.default.cipher=AES/CTR/NoPadding 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.failover-retries=0 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multipart.purge.age=86400 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.local-fs.single-disk-limit.check.interval-ms=5000 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:net.topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.address=0.0.0.0:8049 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.server.listen.queue.size=128 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:map.sort.class=org.apache.hadoop.util.QuickSort 21:47:38.896 
pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.viewfs.rename.strategy=SAME_MOUNTPOINT 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.authentication.retry-count=1 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.permissions.umask-mode=022 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.assumed.role.credentials.provider=org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.vmem-check-enabled=true 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.numa-awareness.enabled=false 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.recovery.compaction-interval-secs=3600 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.client-am.ipc.max-retries=3 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.registry.base-dir=yarnfederation/ 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.app.id=local-1593406028241 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.max.map=-1 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.local-fs.single-disk-limit.bytes=-1 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.ubertask.maxreduces=1 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.encrypted.key.cache.size=500 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.java.secure.random.algorithm=SHA1PRNG 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.failover-controller.cli-check.rpc-timeout.ms=20000 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.jobname.limit=50 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.nodemanager-connect.retry-interval-ms=10000 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.state-store-class=org.apache.hadoop.yarn.server.timeline.recovery.LeveldbTimelineStateStore 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.env-whitelist=JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_HOME,PATH,LANG,TZ 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.nested-level=3 21:47:38.896 pool-1-thread-1 DEBUG 
HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.user.agent.prefix=unknown 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.zk-delegation-token-node.split-index=*********(redacted) 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.numa-awareness.read-topology=false 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.address=${yarn.nodemanager.hostname}:8042 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:rpc.metrics.quantile.enable=false 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.registry.class=org.apache.hadoop.registry.client.impl.FSRegistryOperationsService 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.admin.acl=* 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.system-metrics-publisher.dispatcher.pool-size=10 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.queue-placement-rules=user-group 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.recovery.enabled=false 21:47:38.896 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.rest-csrf.enabled=false 21:47:38.896 pool-1-thread-1 DEBUG SessionState: SessionState user: null 21:47:38.898 pool-1-thread-1 DEBUG Utilities: Create dirs /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-5af5b8f4-f1f4-4ef4-b2af-944add6f6c28 with permission rwx-wx-wx recursive true 21:47:38.899 pool-1-thread-1 DEBUG FileSystem: Looking for FS supporting file 21:47:38.899 pool-1-thread-1 DEBUG FileSystem: looking for configuration option fs.file.impl 21:47:38.899 pool-1-thread-1 DEBUG IsolatedClientLoader: shared class: org.apache.spark.DebugFilesystem 21:47:38.900 pool-1-thread-1 DEBUG FileSystem: Filesystem file defined in configuration option 21:47:38.900 pool-1-thread-1 DEBUG FileSystem: FS for file is class org.apache.spark.DebugFilesystem 21:47:38.910 pool-1-thread-1 DEBUG SessionState: HDFS root scratch dir: /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-5af5b8f4-f1f4-4ef4-b2af-944add6f6c28 with schema null, permission: rwx-wx-wx 21:47:38.913 pool-1-thread-1 INFO SessionState: Created HDFS directory: /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-5af5b8f4-f1f4-4ef4-b2af-944add6f6c28/jenkins 21:47:38.917 pool-1-thread-1 INFO SessionState: Created HDFS directory: /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-5af5b8f4-f1f4-4ef4-b2af-944add6f6c28/jenkins/241742d1-757a-4cfc-ae21-62dde34d51df 21:47:38.923 
pool-1-thread-1 INFO SessionState: Created local directory: /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/jenkins/241742d1-757a-4cfc-ae21-62dde34d51df 21:47:38.928 pool-1-thread-1 INFO SessionState: Created HDFS directory: /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-5af5b8f4-f1f4-4ef4-b2af-944add6f6c28/jenkins/241742d1-757a-4cfc-ae21-62dde34d51df/_tmp_space.db 21:47:38.929 pool-1-thread-1 INFO HiveClientImpl: Warehouse location for Hive client (version 2.3.7) is file:/home/jenkins/workspace/NewSparkPullRequestBuilder/sql/hive-thriftserver/spark-warehouse/org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite 21:47:38.929 pool-1-thread-1 INFO ServerUtils: Cleaning scratchDir : /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-5af5b8f4-f1f4-4ef4-b2af-944add6f6c28 21:47:38.931 pool-1-thread-1 INFO SessionManager: Operation log root directory is created: /home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/jenkins/operation_logs 21:47:38.931 pool-1-thread-1 INFO SessionManager: HiveServer2: Background operation thread pool size: 100 21:47:38.932 pool-1-thread-1 INFO SessionManager: HiveServer2: Background operation thread wait queue size: 100 21:47:38.932 pool-1-thread-1 INFO SessionManager: HiveServer2: Background operation thread keepalive time: 10 seconds 21:47:38.932 pool-1-thread-1 INFO AbstractService: Service:OperationManager is inited. 21:47:38.932 pool-1-thread-1 INFO AbstractService: Service:SessionManager is inited. 21:47:38.932 pool-1-thread-1 INFO AbstractService: Service: CLIService is inited. 21:47:38.933 pool-1-thread-1 INFO AbstractService: Service:ThriftBinaryCLIService is inited. 21:47:38.933 pool-1-thread-1 INFO AbstractService: Service: HiveServer2 is inited. 21:47:38.933 pool-1-thread-1 INFO AbstractService: Service:OperationManager is started. 21:47:38.933 pool-1-thread-1 INFO AbstractService: Service:SessionManager is started. 21:47:38.934 pool-1-thread-1 INFO AbstractService: Service: CLIService is started. 21:47:38.934 pool-1-thread-1 INFO AbstractService: Service:ThriftBinaryCLIService is started. 21:47:38.934 pool-1-thread-1 INFO ThriftCLIService: Starting ThriftBinaryCLIService on port 33133 with 5...500 worker threads 21:47:38.935 pool-1-thread-1 INFO AbstractService: Service:HiveServer2 is started. 
21:47:38.935 pool-1-thread-1 INFO HiveThriftServer2: HiveThriftServer2 started 21:47:38.936 pool-1-thread-1 INFO ThriftServerWithSparkContextInBinarySuite: Started HiveThriftServer2: mode=binary, port=33133, attempt=1 21:47:38.936 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:38.936 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:38.937 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@e900cae 21:47:38.937 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:38.937 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:38.937 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:38.937 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:38.937 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:38.937 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:38.937 HiveServer2-Handler-Pool: Thread-224 DEBUG TSaslServerTransport: transport map does not contain key 21:47:38.938 HiveServer2-Handler-Pool: Thread-224 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@3f503dfe 21:47:38.938 HiveServer2-Handler-Pool: Thread-224 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:38.938 HiveServer2-Handler-Pool: Thread-224 DEBUG TSaslServerTransport: Received start message with status START 21:47:38.938 HiveServer2-Handler-Pool: Thread-224 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:38.938 HiveServer2-Handler-Pool: Thread-224 DEBUG TSaslTransport: SERVER: Start message handled 21:47:38.938 HiveServer2-Handler-Pool: Thread-224 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:38.938 HiveServer2-Handler-Pool: Thread-224 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:38.938 HiveServer2-Handler-Pool: Thread-224 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:38.938 HiveServer2-Handler-Pool: Thread-224 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@3197c5bc 21:47:38.938 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:38.938 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:38.938 HiveServer2-Handler-Pool: Thread-224 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:38.938 HiveServer2-Handler-Pool: Thread-224 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:38.939 HiveServer2-Handler-Pool: Thread-224 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:38.939 HiveServer2-Handler-Pool: Thread-224 DEBUG ThriftCLIService: Client's username: jenkins 21:47:38.939 HiveServer2-Handler-Pool: Thread-224 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:38.940 HiveServer2-Handler-Pool: Thread-224 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.<init>(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.<init>(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:38.941 HiveServer2-Handler-Pool: Thread-224 DEBUG TSaslTransport: writing data length: 3487 21:47:38.941 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:38.941 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:39.042 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:39.042 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:39.043 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@674941d7 21:47:39.043 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:39.043 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:39.043 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:39.043 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:39.043 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:39.043 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:39.044 HiveServer2-Handler-Pool: Thread-225 DEBUG TSaslServerTransport: transport map does not contain key 21:47:39.044 HiveServer2-Handler-Pool: Thread-225 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@9bb65a6 21:47:39.044 HiveServer2-Handler-Pool: Thread-225 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:39.044 HiveServer2-Handler-Pool: Thread-225 DEBUG TSaslServerTransport: Received start message with status START 21:47:39.044 HiveServer2-Handler-Pool: Thread-225 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:39.044 HiveServer2-Handler-Pool: Thread-225 DEBUG TSaslTransport: SERVER: Start message handled 21:47:39.044 HiveServer2-Handler-Pool: Thread-225 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:39.045 HiveServer2-Handler-Pool: Thread-225 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:39.045 HiveServer2-Handler-Pool: Thread-225 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:39.045 HiveServer2-Handler-Pool: Thread-225 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@77c9b39 21:47:39.045 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:39.045 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:39.045 HiveServer2-Handler-Pool: Thread-225 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:39.045 HiveServer2-Handler-Pool: Thread-225 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:39.045 HiveServer2-Handler-Pool: Thread-225 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.045 HiveServer2-Handler-Pool: Thread-225 DEBUG ThriftCLIService: Client's username: jenkins 21:47:39.045 HiveServer2-Handler-Pool: Thread-225 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.047 HiveServer2-Handler-Pool: Thread-225 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:39.047 HiveServer2-Handler-Pool: Thread-225 DEBUG TSaslTransport: writing data length: 3487 21:47:39.048 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:39.048 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:39.149 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:39.149 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:39.149 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@18fa3843 21:47:39.149 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:39.149 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:39.149 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:39.149 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:39.149 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:39.149 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:39.150 HiveServer2-Handler-Pool: Thread-226 DEBUG TSaslServerTransport: transport map does not contain key 21:47:39.150 HiveServer2-Handler-Pool: Thread-226 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@4c55b54a 21:47:39.150 HiveServer2-Handler-Pool: Thread-226 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:39.150 HiveServer2-Handler-Pool: Thread-226 DEBUG TSaslServerTransport: Received start message with status START 21:47:39.150 HiveServer2-Handler-Pool: Thread-226 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:39.150 HiveServer2-Handler-Pool: Thread-226 DEBUG TSaslTransport: SERVER: Start message handled 21:47:39.150 HiveServer2-Handler-Pool: Thread-226 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:39.150 HiveServer2-Handler-Pool: Thread-226 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:39.150 HiveServer2-Handler-Pool: Thread-226 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:39.150 HiveServer2-Handler-Pool: Thread-226 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@42925368 21:47:39.150 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:39.150 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:39.151 HiveServer2-Handler-Pool: Thread-226 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:39.151 HiveServer2-Handler-Pool: Thread-226 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:39.151 HiveServer2-Handler-Pool: Thread-226 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.151 HiveServer2-Handler-Pool: Thread-226 DEBUG ThriftCLIService: Client's username: jenkins 21:47:39.151 HiveServer2-Handler-Pool: Thread-226 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.152 HiveServer2-Handler-Pool: Thread-226 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:39.152 HiveServer2-Handler-Pool: Thread-226 DEBUG TSaslTransport: writing data length: 3487 21:47:39.152 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:39.153 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:39.253 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:39.253 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:39.254 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@4fe65c74 21:47:39.254 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:39.255 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:39.255 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:39.255 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:39.255 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:39.255 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:39.255 HiveServer2-Handler-Pool: Thread-227 DEBUG TSaslServerTransport: transport map does not contain key 21:47:39.255 HiveServer2-Handler-Pool: Thread-227 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@6c60f0b9 21:47:39.255 HiveServer2-Handler-Pool: Thread-227 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:39.255 HiveServer2-Handler-Pool: Thread-227 DEBUG TSaslServerTransport: Received start message with status START 21:47:39.255 HiveServer2-Handler-Pool: Thread-227 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:39.256 HiveServer2-Handler-Pool: Thread-227 DEBUG TSaslTransport: SERVER: Start message handled 21:47:39.256 HiveServer2-Handler-Pool: Thread-227 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:39.256 HiveServer2-Handler-Pool: Thread-227 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:39.256 HiveServer2-Handler-Pool: Thread-227 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:39.256 HiveServer2-Handler-Pool: Thread-227 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@62a7029d 21:47:39.256 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:39.256 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:39.256 HiveServer2-Handler-Pool: Thread-227 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:39.256 HiveServer2-Handler-Pool: Thread-227 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:39.256 HiveServer2-Handler-Pool: Thread-227 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.256 HiveServer2-Handler-Pool: Thread-227 DEBUG ThriftCLIService: Client's username: jenkins 21:47:39.256 HiveServer2-Handler-Pool: Thread-227 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.258 HiveServer2-Handler-Pool: Thread-227 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:39.260 HiveServer2-Handler-Pool: Thread-227 DEBUG TSaslTransport: writing data length: 3487 21:47:39.260 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:39.260 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:39.361 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:39.361 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:39.361 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@419dcc5a 21:47:39.362 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:39.362 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:39.362 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:39.362 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:39.362 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:39.362 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:39.362 HiveServer2-Handler-Pool: Thread-228 DEBUG TSaslServerTransport: transport map does not contain key 21:47:39.362 HiveServer2-Handler-Pool: Thread-228 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@532cc981 21:47:39.362 HiveServer2-Handler-Pool: Thread-228 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:39.362 HiveServer2-Handler-Pool: Thread-228 DEBUG TSaslServerTransport: Received start message with status START 21:47:39.362 HiveServer2-Handler-Pool: Thread-228 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:39.362 HiveServer2-Handler-Pool: Thread-228 DEBUG TSaslTransport: SERVER: Start message handled 21:47:39.362 HiveServer2-Handler-Pool: Thread-228 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:39.362 HiveServer2-Handler-Pool: Thread-228 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:39.362 HiveServer2-Handler-Pool: Thread-228 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:39.362 HiveServer2-Handler-Pool: Thread-228 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@5deb2c4c 21:47:39.362 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:39.363 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:39.363 HiveServer2-Handler-Pool: Thread-228 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:39.363 HiveServer2-Handler-Pool: Thread-228 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:39.363 HiveServer2-Handler-Pool: Thread-228 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.363 HiveServer2-Handler-Pool: Thread-228 DEBUG ThriftCLIService: Client's username: jenkins 21:47:39.363 HiveServer2-Handler-Pool: Thread-228 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.364 HiveServer2-Handler-Pool: Thread-228 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:39.367 HiveServer2-Handler-Pool: Thread-228 DEBUG TSaslTransport: writing data length: 3487 21:47:39.367 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:39.367 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:39.468 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:39.468 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:39.469 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@6c777a47 21:47:39.469 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:39.469 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:39.469 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:39.469 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:39.469 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:39.469 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:39.470 HiveServer2-Handler-Pool: Thread-229 DEBUG TSaslServerTransport: transport map does not contain key 21:47:39.470 HiveServer2-Handler-Pool: Thread-229 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@921d2ef 21:47:39.470 HiveServer2-Handler-Pool: Thread-229 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:39.470 HiveServer2-Handler-Pool: Thread-229 DEBUG TSaslServerTransport: Received start message with status START 21:47:39.470 HiveServer2-Handler-Pool: Thread-229 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:39.471 HiveServer2-Handler-Pool: Thread-229 DEBUG TSaslTransport: SERVER: Start message handled 21:47:39.471 HiveServer2-Handler-Pool: Thread-229 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:39.471 HiveServer2-Handler-Pool: Thread-229 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:39.471 HiveServer2-Handler-Pool: Thread-229 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:39.471 HiveServer2-Handler-Pool: Thread-229 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@7e63c63 21:47:39.471 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:39.471 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:39.471 HiveServer2-Handler-Pool: Thread-229 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:39.471 HiveServer2-Handler-Pool: Thread-229 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:39.472 HiveServer2-Handler-Pool: Thread-229 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.472 HiveServer2-Handler-Pool: Thread-229 DEBUG ThriftCLIService: Client's username: jenkins 21:47:39.472 HiveServer2-Handler-Pool: Thread-229 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.474 HiveServer2-Handler-Pool: Thread-229 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:39.478 HiveServer2-Handler-Pool: Thread-229 DEBUG TSaslTransport: writing data length: 3487 21:47:39.478 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:39.479 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:39.579 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:39.579 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:39.580 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@25867faa 21:47:39.580 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:39.580 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:39.580 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:39.580 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:39.580 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:39.580 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:39.581 HiveServer2-Handler-Pool: Thread-230 DEBUG TSaslServerTransport: transport map does not contain key 21:47:39.581 HiveServer2-Handler-Pool: Thread-230 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@3a16d68e 21:47:39.581 HiveServer2-Handler-Pool: Thread-230 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:39.581 HiveServer2-Handler-Pool: Thread-230 DEBUG TSaslServerTransport: Received start message with status START 21:47:39.581 HiveServer2-Handler-Pool: Thread-230 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:39.582 HiveServer2-Handler-Pool: Thread-230 DEBUG TSaslTransport: SERVER: Start message handled 21:47:39.582 HiveServer2-Handler-Pool: Thread-230 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:39.582 HiveServer2-Handler-Pool: Thread-230 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:39.582 HiveServer2-Handler-Pool: Thread-230 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:39.582 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:39.582 HiveServer2-Handler-Pool: Thread-230 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@34d12e64 21:47:39.582 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:39.582 HiveServer2-Handler-Pool: Thread-230 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:39.582 HiveServer2-Handler-Pool: Thread-230 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:39.582 HiveServer2-Handler-Pool: Thread-230 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.582 HiveServer2-Handler-Pool: Thread-230 DEBUG ThriftCLIService: Client's username: jenkins 21:47:39.582 HiveServer2-Handler-Pool: Thread-230 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.584 HiveServer2-Handler-Pool: Thread-230 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:39.585 HiveServer2-Handler-Pool: Thread-230 DEBUG TSaslTransport: writing data length: 3487 21:47:39.586 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:39.586 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:39.686 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:39.687 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:39.687 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@587e8292 21:47:39.688 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:39.688 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:39.688 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:39.688 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:39.688 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:39.688 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:39.688 HiveServer2-Handler-Pool: Thread-231 DEBUG TSaslServerTransport: transport map does not contain key 21:47:39.689 HiveServer2-Handler-Pool: Thread-231 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@179a074d 21:47:39.689 HiveServer2-Handler-Pool: Thread-231 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:39.689 HiveServer2-Handler-Pool: Thread-231 DEBUG TSaslServerTransport: Received start message with status START 21:47:39.689 HiveServer2-Handler-Pool: Thread-231 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:39.689 HiveServer2-Handler-Pool: Thread-231 DEBUG TSaslTransport: SERVER: Start message handled 21:47:39.689 HiveServer2-Handler-Pool: Thread-231 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:39.689 HiveServer2-Handler-Pool: Thread-231 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:39.689 HiveServer2-Handler-Pool: Thread-231 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:39.689 HiveServer2-Handler-Pool: Thread-231 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@195dfa16 21:47:39.690 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:39.690 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:39.690 HiveServer2-Handler-Pool: Thread-231 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:39.690 HiveServer2-Handler-Pool: Thread-231 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:39.690 HiveServer2-Handler-Pool: Thread-231 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.690 HiveServer2-Handler-Pool: Thread-231 DEBUG ThriftCLIService: Client's username: jenkins 21:47:39.690 HiveServer2-Handler-Pool: Thread-231 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.692 HiveServer2-Handler-Pool: Thread-231 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:39.693 HiveServer2-Handler-Pool: Thread-231 DEBUG TSaslTransport: writing data length: 3487 21:47:39.693 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:39.694 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:39.794 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:39.795 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:39.795 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@772e7adb 21:47:39.795 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:39.795 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:39.795 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:39.795 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:39.795 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:39.795 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:39.796 HiveServer2-Handler-Pool: Thread-232 DEBUG TSaslServerTransport: transport map does not contain key 21:47:39.796 HiveServer2-Handler-Pool: Thread-232 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@1fd49ee6 21:47:39.796 HiveServer2-Handler-Pool: Thread-232 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:39.796 HiveServer2-Handler-Pool: Thread-232 DEBUG TSaslServerTransport: Received start message with status START 21:47:39.796 HiveServer2-Handler-Pool: Thread-232 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:39.796 HiveServer2-Handler-Pool: Thread-232 DEBUG TSaslTransport: SERVER: Start message handled 21:47:39.796 HiveServer2-Handler-Pool: Thread-232 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:39.797 HiveServer2-Handler-Pool: Thread-232 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:39.797 HiveServer2-Handler-Pool: Thread-232 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:39.797 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:39.797 HiveServer2-Handler-Pool: Thread-232 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@111372b4 21:47:39.797 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:39.797 HiveServer2-Handler-Pool: Thread-232 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:39.797 HiveServer2-Handler-Pool: Thread-232 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:39.797 HiveServer2-Handler-Pool: Thread-232 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.797 HiveServer2-Handler-Pool: Thread-232 DEBUG ThriftCLIService: Client's username: jenkins 21:47:39.797 HiveServer2-Handler-Pool: Thread-232 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.799 HiveServer2-Handler-Pool: Thread-232 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:39.800 HiveServer2-Handler-Pool: Thread-232 DEBUG TSaslTransport: writing data length: 3487 21:47:39.800 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:39.801 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:39.901 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:39.901 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:39.902 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@20dc933b 21:47:39.902 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:39.902 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:39.902 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:39.902 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:39.902 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:39.902 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:39.902 HiveServer2-Handler-Pool: Thread-233 DEBUG TSaslServerTransport: transport map does not contain key 21:47:39.903 HiveServer2-Handler-Pool: Thread-233 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@2eaa0254 21:47:39.903 HiveServer2-Handler-Pool: Thread-233 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:39.903 HiveServer2-Handler-Pool: Thread-233 DEBUG TSaslServerTransport: Received start message with status START 21:47:39.903 HiveServer2-Handler-Pool: Thread-233 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:39.903 HiveServer2-Handler-Pool: Thread-233 DEBUG TSaslTransport: SERVER: Start message handled 21:47:39.903 HiveServer2-Handler-Pool: Thread-233 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:39.903 HiveServer2-Handler-Pool: Thread-233 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:39.903 HiveServer2-Handler-Pool: Thread-233 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:39.903 HiveServer2-Handler-Pool: Thread-233 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@206de829 21:47:39.903 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:39.903 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:39.903 HiveServer2-Handler-Pool: Thread-233 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:39.903 HiveServer2-Handler-Pool: Thread-233 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:39.903 HiveServer2-Handler-Pool: Thread-233 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.903 HiveServer2-Handler-Pool: Thread-233 DEBUG ThriftCLIService: Client's username: jenkins 21:47:39.903 HiveServer2-Handler-Pool: Thread-233 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:39.904 HiveServer2-Handler-Pool: Thread-233 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:39.905 HiveServer2-Handler-Pool: Thread-233 DEBUG TSaslTransport: writing data length: 3487 21:47:39.905 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:39.905 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:40.006 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:40.007 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:40.007 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@36b1c3a5 21:47:40.007 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:40.007 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:40.007 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:40.007 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:40.007 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:40.007 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:40.008 HiveServer2-Handler-Pool: Thread-234 DEBUG TSaslServerTransport: transport map does not contain key 21:47:40.008 HiveServer2-Handler-Pool: Thread-234 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@74004fe3 21:47:40.008 HiveServer2-Handler-Pool: Thread-234 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:40.008 HiveServer2-Handler-Pool: Thread-234 DEBUG TSaslServerTransport: Received start message with status START 21:47:40.008 HiveServer2-Handler-Pool: Thread-234 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:40.008 HiveServer2-Handler-Pool: Thread-234 DEBUG TSaslTransport: SERVER: Start message handled 21:47:40.008 HiveServer2-Handler-Pool: Thread-234 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:40.008 HiveServer2-Handler-Pool: Thread-234 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:40.009 HiveServer2-Handler-Pool: Thread-234 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:40.009 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:40.009 HiveServer2-Handler-Pool: Thread-234 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@7ac7aecb 21:47:40.009 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:40.009 HiveServer2-Handler-Pool: Thread-234 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:40.009 HiveServer2-Handler-Pool: Thread-234 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:40.009 HiveServer2-Handler-Pool: Thread-234 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:40.009 HiveServer2-Handler-Pool: Thread-234 DEBUG ThriftCLIService: Client's username: jenkins 21:47:40.009 HiveServer2-Handler-Pool: Thread-234 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:40.011 HiveServer2-Handler-Pool: Thread-234 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:40.012 HiveServer2-Handler-Pool: Thread-234 DEBUG TSaslTransport: writing data length: 3487 21:47:40.012 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:40.012 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:41.013 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:41.013 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:41.014 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@1af7bdf3 21:47:41.014 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:41.014 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:41.014 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:41.014 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:41.014 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:41.014 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:41.015 HiveServer2-Handler-Pool: Thread-235 DEBUG TSaslServerTransport: transport map does not contain key 21:47:41.015 HiveServer2-Handler-Pool: Thread-235 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@3f3aa87b 21:47:41.015 HiveServer2-Handler-Pool: Thread-235 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:41.015 HiveServer2-Handler-Pool: Thread-235 DEBUG TSaslServerTransport: Received start message with status START 21:47:41.015 HiveServer2-Handler-Pool: Thread-235 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:41.016 HiveServer2-Handler-Pool: Thread-235 DEBUG TSaslTransport: SERVER: Start message handled 21:47:41.016 HiveServer2-Handler-Pool: Thread-235 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:41.016 HiveServer2-Handler-Pool: Thread-235 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:41.016 HiveServer2-Handler-Pool: Thread-235 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:41.016 HiveServer2-Handler-Pool: Thread-235 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@31fd4034 21:47:41.016 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:41.016 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:41.016 HiveServer2-Handler-Pool: Thread-235 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:41.016 HiveServer2-Handler-Pool: Thread-235 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:41.016 HiveServer2-Handler-Pool: Thread-235 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:41.016 HiveServer2-Handler-Pool: Thread-235 DEBUG ThriftCLIService: Client's username: jenkins 21:47:41.016 HiveServer2-Handler-Pool: Thread-235 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:41.018 HiveServer2-Handler-Pool: Thread-235 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:41.019 HiveServer2-Handler-Pool: Thread-235 DEBUG TSaslTransport: writing data length: 3487 21:47:41.019 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:41.019 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:42.020 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:42.020 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:42.020 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@1e0be4d4 21:47:42.021 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:42.021 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:42.021 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:42.021 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:42.021 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:42.021 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:42.021 HiveServer2-Handler-Pool: Thread-236 DEBUG TSaslServerTransport: transport map does not contain key 21:47:42.021 HiveServer2-Handler-Pool: Thread-236 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@2c3a260b 21:47:42.021 HiveServer2-Handler-Pool: Thread-236 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:42.021 HiveServer2-Handler-Pool: Thread-236 DEBUG TSaslServerTransport: Received start message with status START 21:47:42.021 HiveServer2-Handler-Pool: Thread-236 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:42.021 HiveServer2-Handler-Pool: Thread-236 DEBUG TSaslTransport: SERVER: Start message handled 21:47:42.022 HiveServer2-Handler-Pool: Thread-236 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:42.022 HiveServer2-Handler-Pool: Thread-236 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:42.022 HiveServer2-Handler-Pool: Thread-236 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:42.022 HiveServer2-Handler-Pool: Thread-236 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@22fdcc24 21:47:42.022 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:42.022 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:42.022 HiveServer2-Handler-Pool: Thread-236 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:42.022 HiveServer2-Handler-Pool: Thread-236 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:42.022 HiveServer2-Handler-Pool: Thread-236 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:42.022 HiveServer2-Handler-Pool: Thread-236 DEBUG ThriftCLIService: Client's username: jenkins 21:47:42.022 HiveServer2-Handler-Pool: Thread-236 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:42.024 HiveServer2-Handler-Pool: Thread-236 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:42.025 HiveServer2-Handler-Pool: Thread-236 DEBUG TSaslTransport: writing data length: 3487 21:47:42.025 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:42.026 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:43.026 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:43.026 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:43.027 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@5a8ae965 21:47:43.027 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:43.027 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:43.027 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:43.027 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:43.027 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:43.027 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:43.027 HiveServer2-Handler-Pool: Thread-237 DEBUG TSaslServerTransport: transport map does not contain key 21:47:43.027 HiveServer2-Handler-Pool: Thread-237 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@6e3cc30d 21:47:43.027 HiveServer2-Handler-Pool: Thread-237 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:43.027 HiveServer2-Handler-Pool: Thread-237 DEBUG TSaslServerTransport: Received start message with status START 21:47:43.028 HiveServer2-Handler-Pool: Thread-237 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:43.028 HiveServer2-Handler-Pool: Thread-237 DEBUG TSaslTransport: SERVER: Start message handled 21:47:43.028 HiveServer2-Handler-Pool: Thread-237 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:43.028 HiveServer2-Handler-Pool: Thread-237 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:43.028 HiveServer2-Handler-Pool: Thread-237 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:43.028 HiveServer2-Handler-Pool: Thread-237 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@60fb2162 21:47:43.028 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:43.028 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:43.028 HiveServer2-Handler-Pool: Thread-237 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:43.028 HiveServer2-Handler-Pool: Thread-237 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:43.028 HiveServer2-Handler-Pool: Thread-237 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:43.028 HiveServer2-Handler-Pool: Thread-237 DEBUG ThriftCLIService: Client's username: jenkins 21:47:43.028 HiveServer2-Handler-Pool: Thread-237 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:43.029 HiveServer2-Handler-Pool: Thread-237 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:43.030 HiveServer2-Handler-Pool: Thread-237 DEBUG TSaslTransport: writing data length: 3487 21:47:43.030 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:43.030 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:44.030 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:44.031 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:44.031 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@629eb57a 21:47:44.032 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:44.032 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:44.032 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:44.032 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:44.032 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:44.032 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:44.032 HiveServer2-Handler-Pool: Thread-238 DEBUG TSaslServerTransport: transport map does not contain key 21:47:44.032 HiveServer2-Handler-Pool: Thread-238 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@52fc0bc 21:47:44.032 HiveServer2-Handler-Pool: Thread-238 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:44.032 HiveServer2-Handler-Pool: Thread-238 DEBUG TSaslServerTransport: Received start message with status START 21:47:44.032 HiveServer2-Handler-Pool: Thread-238 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:44.032 HiveServer2-Handler-Pool: Thread-238 DEBUG TSaslTransport: SERVER: Start message handled 21:47:44.032 HiveServer2-Handler-Pool: Thread-238 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:44.032 HiveServer2-Handler-Pool: Thread-238 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:44.032 HiveServer2-Handler-Pool: Thread-238 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:44.032 HiveServer2-Handler-Pool: Thread-238 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@be82fb1 21:47:44.032 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:44.033 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:44.033 HiveServer2-Handler-Pool: Thread-238 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:44.033 HiveServer2-Handler-Pool: Thread-238 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:44.033 HiveServer2-Handler-Pool: Thread-238 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:44.033 HiveServer2-Handler-Pool: Thread-238 DEBUG ThriftCLIService: Client's username: jenkins 21:47:44.033 HiveServer2-Handler-Pool: Thread-238 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:44.034 HiveServer2-Handler-Pool: Thread-238 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more
21:47:44.035 HiveServer2-Handler-Pool: Thread-238 DEBUG TSaslTransport: writing data length: 3487
21:47:44.035 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487
21:47:44.036 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133
21:47:45.036 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133
21:47:45.036 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133
21:47:45.037 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@452184ff
21:47:45.037 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18
21:47:45.037 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5
21:47:45.037 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18
21:47:45.038 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled
21:47:45.038 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete
21:47:45.038 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message
21:47:45.038 HiveServer2-Handler-Pool: Thread-239 DEBUG TSaslServerTransport: transport map does not contain key
21:47:45.038 HiveServer2-Handler-Pool: Thread-239 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@6da0a0d3
21:47:45.038 HiveServer2-Handler-Pool: Thread-239 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5
21:47:45.038 HiveServer2-Handler-Pool: Thread-239 DEBUG TSaslServerTransport: Received start message with status START
21:47:45.038 HiveServer2-Handler-Pool: Thread-239 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN'
21:47:45.039 HiveServer2-Handler-Pool: Thread-239 DEBUG TSaslTransport: SERVER: Start message handled
21:47:45.039 HiveServer2-Handler-Pool: Thread-239 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18
21:47:45.039 HiveServer2-Handler-Pool: Thread-239 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0
21:47:45.039 HiveServer2-Handler-Pool: Thread-239 DEBUG TSaslTransport: SERVER: Main negotiation loop complete
21:47:45.039 HiveServer2-Handler-Pool: Thread-239 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@636752cf
21:47:45.039 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0
21:47:45.039 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144
21:47:45.039 HiveServer2-Handler-Pool: Thread-239 DEBUG TSaslTransport: SERVER: reading data length: 144
21:47:45.039 HiveServer2-Handler-Pool: Thread-239 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10
21:47:45.039 HiveServer2-Handler-Pool: Thread-239 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1
21:47:45.039 HiveServer2-Handler-Pool: Thread-239 DEBUG ThriftCLIService: Client's username: jenkins
21:47:45.039 HiveServer2-Handler-Pool: Thread-239 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1
21:47:45.042 HiveServer2-Handler-Pool: Thread-239 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection
    at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.<init>(HiveSessionImplwithUGI.java:60)
    at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57)
    at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203)
    at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354)
    at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248)
    at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377)
    at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362)
    at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38)
    at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
    at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53)
    at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
    at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236)
    at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388)
    at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332)
    at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312)
    at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288)
    at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.<init>(HiveSessionImplwithUGI.java:58)
    ... 14 more
Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248)
    at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231)
    ... 19 more
Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean)
    at java.lang.Class.getConstructor0(Class.java:3082)
    at java.lang.Class.getDeclaredConstructor(Class.java:2178)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705)
    ... 28 more
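The innermost cause in every one of these traces is a reflective constructor lookup: MetaStoreUtils.newInstance calls Class.getDeclaredConstructor asking for SessionHiveMetaStoreClient.<init>(HiveConf, HiveMetaHookLoader, java.lang.Boolean) and gets a NoSuchMethodException. The sketch below is a minimal, self-contained Java illustration of that failure mode; it assumes (not confirmed by this log, but consistent with mismatched Hive jars on the classpath) that the class actually loaded declares its third constructor parameter as a primitive boolean rather than the boxed java.lang.Boolean being requested. getDeclaredConstructor matches parameter types exactly, so the boxed lookup fails. FakeClient is a hypothetical stand-in, not a Hive class.

// Illustrative sketch of the reflective lookup failure seen in the trace above.
public class ConstructorLookupDemo {

    // Hypothetical stand-in for a metastore client whose constructor takes a primitive boolean.
    static class FakeClient {
        FakeClient(String conf, boolean allowEmbedded) { }
    }

    public static void main(String[] args) throws Exception {
        // Exact-match lookup with the primitive type succeeds.
        FakeClient.class.getDeclaredConstructor(String.class, boolean.class);
        System.out.println("boolean.class lookup: OK");

        try {
            // Lookup with the boxed type fails, mirroring the NoSuchMethodException in the log.
            FakeClient.class.getDeclaredConstructor(String.class, Boolean.class);
        } catch (NoSuchMethodException e) {
            System.out.println("Boolean.class lookup: " + e);
        }
    }
}

Running the sketch prints the successful primitive lookup first, then a NoSuchMethodException naming the (java.lang.String, java.lang.Boolean) signature, analogous to the signature named in the trace.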
21:47:45.042 HiveServer2-Handler-Pool: Thread-239 DEBUG TSaslTransport: writing data length: 3487
21:47:45.043 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487
21:47:45.043 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133
[Nine further attempts, 21:47:46 through 21:47:54 (HiveServer2 handler threads 240 through 248), repeat the same cycle against localhost:33133: the PLAIN SASL handshake completes, HiveServer2 logs the identical "Error opening session ... Failed to setup metastore connection" stack trace shown above, and the client logs "WARN HiveConnection: Failed to connect to localhost:33133". The final attempt follows in full.]
21:47:55.117 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133
21:47:55.117 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133
21:47:55.117 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@49777be1
21:47:55.118 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18
21:47:55.118 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5
21:47:55.118 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18
21:47:55.118 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled
21:47:55.118 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete
21:47:55.118 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message
21:47:55.119 HiveServer2-Handler-Pool: Thread-249 DEBUG TSaslServerTransport: transport map does not contain key
21:47:55.119 HiveServer2-Handler-Pool: Thread-249 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@16f89e19
21:47:55.119 HiveServer2-Handler-Pool: Thread-249 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5
21:47:55.119 HiveServer2-Handler-Pool: Thread-249 DEBUG TSaslServerTransport: Received start message with status START
21:47:55.119 HiveServer2-Handler-Pool: Thread-249 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN'
21:47:55.119 HiveServer2-Handler-Pool: Thread-249 DEBUG TSaslTransport: SERVER: Start message handled
21:47:55.119 HiveServer2-Handler-Pool: Thread-249 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18
21:47:55.119 HiveServer2-Handler-Pool: Thread-249 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0
21:47:55.119 HiveServer2-Handler-Pool: Thread-249 DEBUG TSaslTransport: SERVER: Main negotiation loop complete
21:47:55.119 HiveServer2-Handler-Pool: Thread-249 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@183f736e
21:47:55.119 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0
21:47:55.119 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144
21:47:55.120 HiveServer2-Handler-Pool: Thread-249 DEBUG TSaslTransport: SERVER: reading data length: 144
21:47:55.120 HiveServer2-Handler-Pool: Thread-249 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10
21:47:55.120 HiveServer2-Handler-Pool: Thread-249 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1
21:47:55.120 HiveServer2-Handler-Pool: Thread-249 DEBUG ThriftCLIService: Client's username: jenkins
21:47:55.120 HiveServer2-Handler-Pool: Thread-249 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1
21:47:55.122 HiveServer2-Handler-Pool: Thread-249 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection
    at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.<init>(HiveSessionImplwithUGI.java:60)
    at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:55.123 HiveServer2-Handler-Pool: Thread-249 DEBUG TSaslTransport: writing data length: 3487 21:47:55.123 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:55.123 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:56.124 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:56.124 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:56.124 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@200ec932 21:47:56.125 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:56.125 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:56.125 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:56.125 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:56.125 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:56.125 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:56.125 HiveServer2-Handler-Pool: Thread-250 DEBUG TSaslServerTransport: transport map does not contain key 21:47:56.125 HiveServer2-Handler-Pool: Thread-250 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@74951d19 21:47:56.126 HiveServer2-Handler-Pool: Thread-250 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:56.126 HiveServer2-Handler-Pool: Thread-250 DEBUG TSaslServerTransport: Received start message with status START 21:47:56.126 HiveServer2-Handler-Pool: Thread-250 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:56.126 HiveServer2-Handler-Pool: Thread-250 DEBUG TSaslTransport: SERVER: Start message handled 21:47:56.126 HiveServer2-Handler-Pool: Thread-250 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:56.126 HiveServer2-Handler-Pool: Thread-250 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:56.126 HiveServer2-Handler-Pool: Thread-250 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:56.126 HiveServer2-Handler-Pool: Thread-250 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@7a52fc23 21:47:56.126 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:56.126 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:56.126 HiveServer2-Handler-Pool: Thread-250 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:56.127 HiveServer2-Handler-Pool: Thread-250 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:56.127 HiveServer2-Handler-Pool: Thread-250 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:56.127 HiveServer2-Handler-Pool: Thread-250 DEBUG ThriftCLIService: Client's username: jenkins 21:47:56.127 HiveServer2-Handler-Pool: Thread-250 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:56.128 HiveServer2-Handler-Pool: Thread-250 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:56.129 HiveServer2-Handler-Pool: Thread-250 DEBUG TSaslTransport: writing data length: 3487 21:47:56.129 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:56.130 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:57.130 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:57.130 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:57.131 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@712e6d2e 21:47:57.131 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:57.131 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:57.131 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:57.131 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:57.131 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:57.131 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:57.132 HiveServer2-Handler-Pool: Thread-251 DEBUG TSaslServerTransport: transport map does not contain key 21:47:57.132 HiveServer2-Handler-Pool: Thread-251 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@69ec041d 21:47:57.132 HiveServer2-Handler-Pool: Thread-251 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:57.132 HiveServer2-Handler-Pool: Thread-251 DEBUG TSaslServerTransport: Received start message with status START 21:47:57.132 HiveServer2-Handler-Pool: Thread-251 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:57.132 HiveServer2-Handler-Pool: Thread-251 DEBUG TSaslTransport: SERVER: Start message handled 21:47:57.132 HiveServer2-Handler-Pool: Thread-251 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:57.132 HiveServer2-Handler-Pool: Thread-251 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:57.132 HiveServer2-Handler-Pool: Thread-251 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:57.133 HiveServer2-Handler-Pool: Thread-251 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@672bf688 21:47:57.133 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:57.133 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:57.133 HiveServer2-Handler-Pool: Thread-251 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:57.133 HiveServer2-Handler-Pool: Thread-251 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:57.133 HiveServer2-Handler-Pool: Thread-251 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:57.133 HiveServer2-Handler-Pool: Thread-251 DEBUG ThriftCLIService: Client's username: jenkins 21:47:57.133 HiveServer2-Handler-Pool: Thread-251 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:57.135 HiveServer2-Handler-Pool: Thread-251 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:57.135 HiveServer2-Handler-Pool: Thread-251 DEBUG TSaslTransport: writing data length: 3487 21:47:57.136 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:57.136 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:58.137 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:58.137 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:58.137 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@18e7c669 21:47:58.137 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:58.137 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:58.138 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:58.138 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:58.138 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:58.138 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:58.138 HiveServer2-Handler-Pool: Thread-252 DEBUG TSaslServerTransport: transport map does not contain key 21:47:58.138 HiveServer2-Handler-Pool: Thread-252 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@65ae3420 21:47:58.139 HiveServer2-Handler-Pool: Thread-252 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:58.139 HiveServer2-Handler-Pool: Thread-252 DEBUG TSaslServerTransport: Received start message with status START 21:47:58.139 HiveServer2-Handler-Pool: Thread-252 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:58.139 HiveServer2-Handler-Pool: Thread-252 DEBUG TSaslTransport: SERVER: Start message handled 21:47:58.139 HiveServer2-Handler-Pool: Thread-252 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:58.139 HiveServer2-Handler-Pool: Thread-252 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:58.139 HiveServer2-Handler-Pool: Thread-252 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:58.139 HiveServer2-Handler-Pool: Thread-252 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@6b69d73d 21:47:58.139 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:58.139 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:58.139 HiveServer2-Handler-Pool: Thread-252 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:58.139 HiveServer2-Handler-Pool: Thread-252 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:58.139 HiveServer2-Handler-Pool: Thread-252 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:58.139 HiveServer2-Handler-Pool: Thread-252 DEBUG ThriftCLIService: Client's username: jenkins 21:47:58.139 HiveServer2-Handler-Pool: Thread-252 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:58.141 HiveServer2-Handler-Pool: Thread-252 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:58.142 HiveServer2-Handler-Pool: Thread-252 DEBUG TSaslTransport: writing data length: 3487 21:47:58.142 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:58.143 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:47:59.143 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:47:59.143 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:47:59.144 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@6abd1ee7 21:47:59.144 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:47:59.144 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:47:59.144 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:47:59.144 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:47:59.144 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:47:59.144 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:47:59.145 HiveServer2-Handler-Pool: Thread-253 DEBUG TSaslServerTransport: transport map does not contain key 21:47:59.145 HiveServer2-Handler-Pool: Thread-253 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@7c5dcd7c 21:47:59.145 HiveServer2-Handler-Pool: Thread-253 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:47:59.145 HiveServer2-Handler-Pool: Thread-253 DEBUG TSaslServerTransport: Received start message with status START 21:47:59.145 HiveServer2-Handler-Pool: Thread-253 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:47:59.146 HiveServer2-Handler-Pool: Thread-253 DEBUG TSaslTransport: SERVER: Start message handled 21:47:59.146 HiveServer2-Handler-Pool: Thread-253 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:47:59.146 HiveServer2-Handler-Pool: Thread-253 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:47:59.146 HiveServer2-Handler-Pool: Thread-253 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:47:59.146 HiveServer2-Handler-Pool: Thread-253 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@729a2d5f 21:47:59.146 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:47:59.146 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:47:59.146 HiveServer2-Handler-Pool: Thread-253 DEBUG TSaslTransport: SERVER: reading data length: 144 21:47:59.146 HiveServer2-Handler-Pool: Thread-253 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:47:59.146 HiveServer2-Handler-Pool: Thread-253 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:59.146 HiveServer2-Handler-Pool: Thread-253 DEBUG ThriftCLIService: Client's username: jenkins 21:47:59.146 HiveServer2-Handler-Pool: Thread-253 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:47:59.148 HiveServer2-Handler-Pool: Thread-253 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:47:59.149 HiveServer2-Handler-Pool: Thread-253 DEBUG TSaslTransport: writing data length: 3487 21:47:59.149 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:47:59.149 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:48:00.150 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:48:00.150 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:48:00.150 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@3cac1ddb 21:48:00.151 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:48:00.151 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:48:00.151 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:48:00.151 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:48:00.151 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:48:00.151 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:48:00.151 HiveServer2-Handler-Pool: Thread-254 DEBUG TSaslServerTransport: transport map does not contain key 21:48:00.151 HiveServer2-Handler-Pool: Thread-254 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@2106734f 21:48:00.152 HiveServer2-Handler-Pool: Thread-254 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:48:00.152 HiveServer2-Handler-Pool: Thread-254 DEBUG TSaslServerTransport: Received start message with status START 21:48:00.152 HiveServer2-Handler-Pool: Thread-254 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:48:00.152 HiveServer2-Handler-Pool: Thread-254 DEBUG TSaslTransport: SERVER: Start message handled 21:48:00.152 HiveServer2-Handler-Pool: Thread-254 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:48:00.152 HiveServer2-Handler-Pool: Thread-254 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:48:00.152 HiveServer2-Handler-Pool: Thread-254 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:48:00.152 HiveServer2-Handler-Pool: Thread-254 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@2fc188c7 21:48:00.152 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:48:00.152 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:48:00.152 HiveServer2-Handler-Pool: Thread-254 DEBUG TSaslTransport: SERVER: reading data length: 144 21:48:00.153 HiveServer2-Handler-Pool: Thread-254 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:48:00.153 HiveServer2-Handler-Pool: Thread-254 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:48:00.153 HiveServer2-Handler-Pool: Thread-254 DEBUG ThriftCLIService: Client's username: jenkins 21:48:00.153 HiveServer2-Handler-Pool: Thread-254 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:48:00.155 HiveServer2-Handler-Pool: Thread-254 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:48:00.156 HiveServer2-Handler-Pool: Thread-254 DEBUG TSaslTransport: writing data length: 3487 21:48:00.156 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:48:00.158 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:48:01.159 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:48:01.159 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:48:01.159 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@3e69507d 21:48:01.159 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:48:01.159 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:48:01.159 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:48:01.160 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:48:01.160 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:48:01.160 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:48:01.160 HiveServer2-Handler-Pool: Thread-255 DEBUG TSaslServerTransport: transport map does not contain key 21:48:01.160 HiveServer2-Handler-Pool: Thread-255 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@52978723 21:48:01.160 HiveServer2-Handler-Pool: Thread-255 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:48:01.160 HiveServer2-Handler-Pool: Thread-255 DEBUG TSaslServerTransport: Received start message with status START 21:48:01.160 HiveServer2-Handler-Pool: Thread-255 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:48:01.161 HiveServer2-Handler-Pool: Thread-255 DEBUG TSaslTransport: SERVER: Start message handled 21:48:01.161 HiveServer2-Handler-Pool: Thread-255 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:48:01.161 HiveServer2-Handler-Pool: Thread-255 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:48:01.161 HiveServer2-Handler-Pool: Thread-255 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:48:01.161 HiveServer2-Handler-Pool: Thread-255 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@5c5072d9 21:48:01.161 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:48:01.161 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:48:01.161 HiveServer2-Handler-Pool: Thread-255 DEBUG TSaslTransport: SERVER: reading data length: 144 21:48:01.161 HiveServer2-Handler-Pool: Thread-255 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:48:01.161 HiveServer2-Handler-Pool: Thread-255 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:48:01.161 HiveServer2-Handler-Pool: Thread-255 DEBUG ThriftCLIService: Client's username: jenkins 21:48:01.161 HiveServer2-Handler-Pool: Thread-255 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:48:01.163 HiveServer2-Handler-Pool: Thread-255 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:48:01.164 HiveServer2-Handler-Pool: Thread-255 DEBUG TSaslTransport: writing data length: 3487 21:48:01.164 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:48:01.164 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:48:02.165 pool-1-thread-1 INFO Utils: Supplied authorities: localhost:33133 21:48:02.165 pool-1-thread-1 INFO Utils: Resolved authority: localhost:33133 21:48:02.165 pool-1-thread-1 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslClientTransport@189b0d57 21:48:02.166 pool-1-thread-1 DEBUG TSaslClientTransport: Sending mechanism name PLAIN and initial response of length 18 21:48:02.166 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status START and payload length 5 21:48:02.166 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Writing message with status COMPLETE and payload length 18 21:48:02.166 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Start message handled 21:48:02.166 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Main negotiation loop complete 21:48:02.166 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: SASL Client receiving last message 21:48:02.167 HiveServer2-Handler-Pool: Thread-256 DEBUG TSaslServerTransport: transport map does not contain key 21:48:02.167 HiveServer2-Handler-Pool: Thread-256 DEBUG TSaslTransport: opening transport org.apache.thrift.transport.TSaslServerTransport@6c95a133 21:48:02.167 HiveServer2-Handler-Pool: Thread-256 DEBUG TSaslTransport: SERVER: Received message with status START and payload length 5 21:48:02.167 HiveServer2-Handler-Pool: Thread-256 DEBUG TSaslServerTransport: Received start message with status START 21:48:02.167 HiveServer2-Handler-Pool: Thread-256 DEBUG TSaslServerTransport: Received mechanism name 'PLAIN' 21:48:02.167 HiveServer2-Handler-Pool: Thread-256 DEBUG TSaslTransport: SERVER: Start message handled 21:48:02.168 HiveServer2-Handler-Pool: Thread-256 DEBUG TSaslTransport: SERVER: Received message with status COMPLETE and payload length 18 21:48:02.168 HiveServer2-Handler-Pool: Thread-256 DEBUG TSaslTransport: SERVER: Writing message with status COMPLETE and payload length 0 21:48:02.168 HiveServer2-Handler-Pool: Thread-256 DEBUG TSaslTransport: SERVER: Main negotiation loop complete 21:48:02.168 HiveServer2-Handler-Pool: Thread-256 DEBUG TSaslServerTransport: transport map does contain key org.apache.thrift.transport.TSocket@62524309 21:48:02.168 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: Received message with status COMPLETE and payload length 0 21:48:02.168 pool-1-thread-1 DEBUG TSaslTransport: writing data length: 144 21:48:02.168 HiveServer2-Handler-Pool: Thread-256 DEBUG TSaslTransport: SERVER: reading data length: 144 21:48:02.168 HiveServer2-Handler-Pool: Thread-256 INFO ThriftCLIService: Client protocol version: HIVE_CLI_SERVICE_PROTOCOL_V10 21:48:02.168 HiveServer2-Handler-Pool: Thread-256 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:48:02.168 HiveServer2-Handler-Pool: Thread-256 DEBUG ThriftCLIService: Client's username: jenkins 21:48:02.168 HiveServer2-Handler-Pool: Thread-256 DEBUG ThriftCLIService: Client's IP Address: 127.0.0.1 21:48:02.170 HiveServer2-Handler-Pool: Thread-256 WARN ThriftCLIService: Error opening session: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at 
org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 
28 more 21:48:34.493 HiveServer2-Handler-Pool: Thread-256 DEBUG TSaslTransport: writing data length: 3487 21:48:34.493 pool-1-thread-1 DEBUG TSaslTransport: CLIENT: reading data length: 3487 21:48:34.494 pool-1-thread-1 WARN HiveConnection: Failed to connect to localhost:33133 21:48:34.495 pool-1-thread-1 ERROR ThriftServerWithSparkContextInBinarySuite: Error start hive server with Context org.scalatest.exceptions.TestFailedDueToTimeoutException: The code passed to eventually never returned normally. Attempted 33 times over 55.558287731 seconds. Last failure message: Could not open client transport with JDBC Uri: jdbc:hive2://localhost:33133/: Failed to setup metastore connection. at org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:432) at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:439) at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:391) at org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite.eventually(ThriftServerWithSparkContextSuite.scala:51) at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:308) at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:307) at org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite.eventually(ThriftServerWithSparkContextSuite.scala:51) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.startThriftServer(SharedThriftServer.scala:121) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.$anonfun$beforeAll$4(SharedThriftServer.scala:53) at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) at scala.util.Try$.apply(Try.scala:213) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.$anonfun$beforeAll$3(SharedThriftServer.scala:53) at scala.util.Failure.orElse(Try.scala:224) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.$anonfun$beforeAll$2(SharedThriftServer.scala:53) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.$anonfun$beforeAll$2$adapted(SharedThriftServer.scala:52) at scala.collection.TraversableOnce.$anonfun$foldLeft$1(TraversableOnce.scala:162) at scala.collection.TraversableOnce.$anonfun$foldLeft$1$adapted(TraversableOnce.scala:162) at scala.collection.immutable.Range.foreach(Range.scala:158) at scala.collection.TraversableOnce.foldLeft(TraversableOnce.scala:162) at scala.collection.TraversableOnce.foldLeft$(TraversableOnce.scala:160) at scala.collection.AbstractTraversable.foldLeft(Traversable.scala:108) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.beforeAll(SharedThriftServer.scala:52) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.beforeAll$(SharedThriftServer.scala:49) at org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite.beforeAll(ThriftServerWithSparkContextSuite.scala:51) at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212) at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210) at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208) at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:59) at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317) at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510) at sbt.ForkMain$Run$2.call(ForkMain.java:296) at sbt.ForkMain$Run$2.call(ForkMain.java:286) at java.util.concurrent.FutureTask.run(FutureTask.java:266) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: java.sql.SQLException: Could not open client transport with JDBC Uri: jdbc:hive2://localhost:33133/: Failed to setup metastore connection at org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:224) at org.apache.hive.jdbc.HiveDriver.connect(HiveDriver.java:107) at java.sql.DriverManager.getConnection(DriverManager.java:664) at java.sql.DriverManager.getConnection(DriverManager.java:247) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.$anonfun$withJdbcStatement$2(SharedThriftServer.scala:83) at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238) at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36) at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33) at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:38) at scala.collection.TraversableLike.map(TraversableLike.scala:238) at scala.collection.TraversableLike.map$(TraversableLike.scala:231) at scala.collection.AbstractTraversable.map(Traversable.scala:108) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.withJdbcStatement(SharedThriftServer.scala:83) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.withJdbcStatement$(SharedThriftServer.scala:79) at org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite.withJdbcStatement(ThriftServerWithSparkContextSuite.scala:51) at org.apache.spark.sql.hive.thriftserver.SharedThriftServer.$anonfun$startThriftServer$4(SharedThriftServer.scala:122) at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) at org.scalatest.concurrent.Eventually.makeAValiantAttempt$1(Eventually.scala:395) at org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:409) ... 35 more Caused by: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.jdbc.Utils.verifySuccess(Utils.java:267) at org.apache.hive.jdbc.Utils.verifySuccess(Utils.java:258) at org.apache.hive.jdbc.HiveConnection.openSession(HiveConnection.java:683) at org.apache.hive.jdbc.HiveConnection.<init>(HiveConnection.java:200) ... 53 more Caused by: org.apache.hive.service.cli.HiveSQLException: Failed to setup metastore connection at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.<init>(HiveSessionImplwithUGI.java:60) at org.apache.hive.service.cli.session.SessionManager.openSession(SessionManager.java:264) at org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager.openSession(SparkSQLSessionManager.scala:57) at org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(CLIService.java:203) at org.apache.hive.service.cli.thrift.ThriftCLIService.getSessionHandle(ThriftCLIService.java:354) at org.apache.hive.service.cli.thrift.ThriftCLIService.OpenSession(ThriftCLIService.java:248) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1377) at org.apache.hive.service.rpc.thrift.TCLIService$Processor$OpenSession.getResult(TCLIService.java:1362) at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) at org.apache.hive.service.auth.TSetIpAddressProcessor.process(TSetIpAddressProcessor.java:53) at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:310) ...
3 more Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:236) at org.apache.hadoop.hive.ql.metadata.Hive.<init>(Hive.java:388) at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:332) at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:312) at org.apache.hadoop.hive.ql.metadata.Hive.get(Hive.java:288) at org.apache.hive.service.cli.session.HiveSessionImplwithUGI.<init>(HiveSessionImplwithUGI.java:58) ... 14 more Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133) at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3600) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3652) at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3632) at org.apache.hadoop.hive.ql.metadata.Hive.getAllFunctions(Hive.java:3894) at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:248) at org.apache.hadoop.hive.ql.metadata.Hive.registerAllFunctionsOnce(Hive.java:231) ... 19 more Caused by: java.lang.NoSuchMethodException: org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(org.apache.hadoop.hive.conf.HiveConf, org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean) at java.lang.Class.getConstructor0(Class.java:3082) at java.lang.Class.getDeclaredConstructor(Class.java:2178) at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705) ... 28 more 21:48:34.597 pool-1-thread-1 INFO HiveServer2: Shutting down HiveServer2 21:48:34.597 pool-1-thread-1 INFO ThriftCLIService: Thrift server has stopped 21:48:34.597 pool-1-thread-1 INFO AbstractService: Service:ThriftBinaryCLIService is stopped. 21:48:34.597 pool-1-thread-1 INFO AbstractService: Service:OperationManager is stopped. 21:48:34.597 pool-1-thread-1 INFO AbstractService: Service:SessionManager is stopped. 21:48:34.598 pool-1-thread-1 INFO AbstractService: Service:CLIService is stopped. 21:48:34.598 pool-1-thread-1 INFO AbstractService: Service:HiveServer2 is stopped.
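The failure above is start-up attempt 1 timing out: ScalaTest's eventually keeps retrying a JDBC connection to jdbc:hive2://localhost:33133/ until its patience expires, and every attempt dies in the metastore layer because MetaStoreUtils.newInstance cannot find a SessionHiveMetaStoreClient(HiveConf, HiveMetaHookLoader, Boolean) constructor on the classpath. A minimal Scala sketch of that retry pattern follows; it is not the actual SharedThriftServer.startThriftServer code, and the object name, the SELECT 1 probe, and the timeout/interval values are illustrative assumptions.

    import java.sql.DriverManager

    import org.scalatest.concurrent.Eventually._
    import org.scalatest.time.SpanSugar._

    object ThriftServerProbe {
      // Keep retrying a JDBC connection until the server answers or the patience
      // expires; an exhausted retry surfaces as TestFailedDueToTimeoutException,
      // which is what the suite logs above.
      def waitForServer(port: Int): Unit = {
        Class.forName("org.apache.hive.jdbc.HiveDriver") // Hive JDBC driver seen in the trace
        eventually(timeout(1.minute), interval(1.second)) {
          val conn = DriverManager.getConnection(s"jdbc:hive2://localhost:$port/")
          try {
            val stmt = conn.createStatement()
            try stmt.execute("SELECT 1") // hypothetical probe query, not taken from the log
            finally stmt.close()
          } finally conn.close()
        }
      }
    }

In this run each getConnection call threw SQLException, eventually retried 33 times over roughly 55 seconds, and the suite then logged the error, stopped HiveServer2, and moved on to attempt=2, which begins in the next entry.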
21:48:34.598 pool-1-thread-1 INFO ThriftServerWithSparkContextInBinarySuite: Trying to start HiveThriftServer2: mode=binary, attempt=2 21:48:34.601 pool-1-thread-1 INFO HiveUtils: Initializing execution hive, version 2.3.7 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.service.shutdown.timeout=30s 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.amlauncher.thread-count=50 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.enabled=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.connection.maximum=15 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.numa-awareness.numactl.cmd=/usr/bin/numactl 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.scheduler.heartbeat.interval-ms=1000 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.timeline-client.number-of-async-entities-to-merge=10 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.timeout=60 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.kerberos.principal=HTTP/_HOST@LOCALHOST 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.loadedjob.tasks.max=-1 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.framework.name=local 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.uploader.server.thread-count=50 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.nonsecure-mode.user-pattern=^[_.A-Za-z0-9][-@_.A-Za-z0-9]{0,255}?[$]?$ 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:tfile.fs.output.buffer.size=262144 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.job.task.listener.thread-count=30 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.port.maxRetries=100 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.cache.background.reload.threads=3 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.cross-origin.enabled=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.ftp.impl=org.apache.hadoop.fs.ftp.FtpFs 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying 
Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.secure=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.shell.safely.delete.limit.num.files=100 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:dfs.bytes-per-checksum=512 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.acl-view-job= 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.background.sleep=25ms 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.retry.limit=${fs.s3a.attempts.maximum} 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.loadedjobs.cache.size=5 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.table.create=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.enabled=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.with-user-dir=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.input.fileinputformat.split.minsize=0 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.container.liveness-monitor.interval-ms=600000 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.client.thread-count=50 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.seqfile.compress.blocksize=1000000 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.checksum.algo.impl=org.apache.hadoop.yarn.sharedcache.ChecksumSHA256Impl 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.interceptor-class.pipeline=org.apache.hadoop.yarn.server.nodemanager.amrmproxy.DefaultRequestInterceptor 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.leveldb-cache-read-cache-size=10485760 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.fetch.retry.interval-ms=1000 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.sql.warehouse.dir=file:/home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-c19ab0db-dd28-4773-902e-9cb9ddc681ba/metastore 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.maps=0-2 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.include-port-in-node-name=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark 
and extra properties to Hive Conf:yarn.nodemanager.admin-env=MALLOC_ARENA_MAX=$MALLOC_ARENA_MAX 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.node-removal-untracked.timeout-ms=60000 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.am.max-attempts=2 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.failover.sleep.base.millis=100 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.https.address=0.0.0.0:19890 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.node-labels.fs-store.impl.class=org.apache.hadoop.yarn.nodelabels.FileSystemNodeLabelsStore 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.collector-service.address=${yarn.nodemanager.hostname}:8048 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.trash.checkpoint.interval=0 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.rdbms.datastoreAdapterClassName=org.datanucleus.store.rdbms.adapter.DerbyAdapter 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.map.output.collector.class=org.apache.hadoop.mapred.MapTask$MapOutputBuffer 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.node-ip-cache.expiry-interval-secs=-1 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.signature.secret.file=*********(redacted) 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.jetty.logs.serve.aliases=true 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.handler=disabled 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.handler-thread-count=10 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.max-completed-applications=1000 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.system-metrics-publisher.enabled=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.algorithm.class=org.apache.hadoop.yarn.server.resourcemanager.scheduler.constraint.algorithm.DefaultPlacementAlgorithm 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.webapp.address=0.0.0.0:8788 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation.token.renew-interval=*********(redacted) 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying 
Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.nm.uploader.replication.factor=10 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.identifierFactory=datanucleus1 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.negative-cache.secs=30 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.task.container.log.backups=0 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.skip.proc-count.auto-incr=true 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.posix.attr.gid.name=gidNumber 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.driver.host=amp-jenkins-worker-04.amp 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.fallback-to-simple-auth-allowed=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.memory.enforced=true 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.DetachAllOnCommit=true 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.failover-proxy-provider=org.apache.hadoop.yarn.client.ConfiguredRMFailoverProxyProvider 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.http-authentication.simple.anonymous.allowed=true 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.health-monitor.check-interval.ms=1000 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.acl.reservation-enable=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.store.class=org.apache.hadoop.yarn.server.resourcemanager.recovery.FileSystemRMStateStore 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.hard-kill-timeout-ms=10000 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.etag.checksum.enabled=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-metrics.enable=true 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.fd-clean-interval-secs=60 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nodemanagers.heartbeat-interval-ms=1000 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.common.configuration.version=3.0.0 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:fs.s3a.s3guard.ddb.table.capacity.read=500 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.remote-app-log-dir-suffix=logs 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.windows-container.cpu-limit.enabled=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.privileged-containers.allowed=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.blocksize=67108864 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.retry.ceiling.ms=60000 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.leveldb-store.path=${hadoop.tmp.dir}/yarn/system/confstore 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.store.in-memory.initial-delay-mins=10 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.principal=jhs/_HOST@REALM.TLD 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.skip.proc-count.auto-incr=true 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.name=file 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.reduces=0-2 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.zk.num-retries=1000 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.webapp.xfs-filter.enabled=true 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:seq.io.sort.mb=100 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.max.version=100 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.https.address=${yarn.timeline-service.hostname}:8190 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.NonTransactionalRead=true 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.scheduler.address=${yarn.resourcemanager.hostname}:8030 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.node-labels.enabled=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.ui-actions.enabled=true 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.timeout=600000 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.client-server.thread-count=50 
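The long run of DEBUG entries above and below is HiveClientImpl copying every Hadoop/Hive/Spark property, plus the suite's extra settings, onto the configuration of the execution Hive client being rebuilt for attempt 2, one key=value pair per log line. A minimal sketch of that kind of loop, assuming only that the target behaves like a Hadoop Configuration (HiveConf is a Configuration subclass); this is an illustration, not Spark's HiveClientImpl code, and the object and method names are hypothetical:

    import org.apache.hadoop.conf.Configuration

    // Illustration: push a map of properties onto a Hadoop Configuration,
    // printing one "Applying ... Conf:key=value" line per entry, like the
    // HiveClientImpl DEBUG output in this log.
    object ConfApplySketch {
      def applyProps(conf: Configuration, props: Map[String, String]): Unit = {
        props.foreach { case (k, v) =>
          println(s"Applying Hadoop/Hive/Spark and extra properties to Hive Conf:$k=$v")
          conf.set(k, v)
        }
      }
    }
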
21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.shell.command.timeout=0s 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.crypto.cipher.suite=AES/CTR/NoPadding 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.elastic-memory-control.oom-handler=org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.resources.DefaultOOMHandler 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.connect.max-wait.ms=900000 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.defaultFS=file:/// 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.minicluster.use-rpc=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.transactionIsolation=read-committed 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.har.impl.disable.cache=true 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.webapp.ui2.enable=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.compression.codec.bzip2.library=system-native 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.distributed-scheduling.enabled=false 21:48:34.639 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.connection-keep-alive.timeout=5 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.https.address=${yarn.resourcemanager.hostname}:8090 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.address=0.0.0.0:10020 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-tokens.master-key-rolling-interval-secs=*********(redacted) 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.is.minicluster=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.address=${yarn.nodemanager.hostname}:0 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.abfss.impl=org.apache.hadoop.fs.azurebfs.SecureAzureBlobFileSystem 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.s3a.impl=org.apache.hadoop.fs.s3a.S3A 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.combine.progress.records=10000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.epoch.range=0 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra 
properties to Hive Conf:yarn.resourcemanager.am.max-attempts=2 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.schema.validateTables=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.cgroups.hierarchy=/hadoop-yarn 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.wasbs.impl=org.apache.hadoop.fs.azure.Wasbs 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.cache-store-class=org.apache.hadoop.yarn.server.timeline.MemoryTimelineStore 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.transfer.mode=BLOCK_TRANSFER_MODE 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.start.cleanup.scratchdir=true 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.server.log.slow.rpc=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.cache.level2=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.node-labels.provider.fetch-interval-ms=1800000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.webapp.https.address=0.0.0.0:8091 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.testing=true 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.cross-origin.enabled=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.wasb.impl=org.apache.hadoop.fs.azure.NativeAzureFileSystem 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.auto-update.containers=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.job.committer.cancel-timeout=60000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.zk-store.parent-path=/confstore 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.default-container-executor.log-dirs.permissions=710 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.attempt.diagnostics.limit.kc=64 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.bytes-per-checksum=512 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.memory-mb=-1 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.driver.port=38658 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra 
properties to Hive Conf:fs.AbstractFileSystem.abfs.impl=org.apache.hadoop.fs.azurebfs.Abfs 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.writer.flush-interval-seconds=60 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.fast.upload.active.blocks=4 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.credential.clear-text-fallback=true 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.collector-service.thread-count=5 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.secure.mode=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.joblist.cache.size=20000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.pre.event.listeners= 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.host=0.0.0.0 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.fs.state-store.num-retries=0 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nodemanager-connect-retries=10 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-aggregation.num-log-files-per-app=30 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.encrypted.key.cache.low-watermark=0.3f 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.magic.enabled=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.max-retries=30 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:dfs.ha.fencing.ssh.connect-timeout=30000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation-enable=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.system-metrics-publisher.enabled=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.markreset.buffer.percent=0.0 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.viewfs.impl=org.apache.hadoop.fs.viewfs.ViewFs 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.io.sort.factor=10 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.client.thread-count=25 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.failover-controller.new-active.rpc-timeout.ms=60000 
21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-localizer.java.opts=-Xmx256m 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.datestring.cache.size=200000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.acl-modify-job= 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.windows-container.memory-limit.enabled=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.address=${yarn.timeline-service.hostname}:8188 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.PersistenceManagerFactoryClass=org.datanucleus.api.jdo.JDOPersistenceManagerFactory 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.job.committer.commit-window=10000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-manager.thread-count=20 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.minicluster.fixed.ports=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.tags.system=YARN,HDFS,NAMENODE,DATANODE,REQUIRED,SECURITY,KERBEROS,PERFORMANCE,CLIENT ,SERVER,DEBUG,DEPRECATED,COMMON,OPTIONAL 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.integral.jdo.pushdown=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.cluster.max-application-priority=0 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.ttl-enable=true 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.recovery.store.fs.uri=${hadoop.tmp.dir}/mapred/history/recoverystore 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.caller.context.signature.max.size=40 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.load.resource-types.from-server=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.zookeeper.session-timeout.ms=10000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.hadoop.fs.file.impl=org.apache.spark.DebugFilesystem 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:tfile.io.chunk.size=1048576 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.table.capacity.write=100 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.slowtaskthreshold=1.0 21:48:34.640 pool-1-thread-1 DEBUG 
HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.serializations=org.apache.hadoop.io.serializer.WritableSerialization, org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization, org.apache.hadoop.io.serializer.avro.AvroReflectSerialization 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.failover.sleep.max.millis=2000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.directory.search.timeout=10000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.warehouse.dir=file:/home/jenkins/workspace/NewSparkPullRequestBuilder/sql/hive-thriftserver/spark-warehouse/org.apache.spark.sql.hive.thriftserver.ThriftServerWithSparkContextInBinarySuite 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.store.max-logs=1000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-attributes.provider.fetch-interval-ms=600000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.swift.impl=org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.local-cache.max-files-per-directory=8192 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.enabled=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.zk.acl=world:anyone:rwcda 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.sort.spill.percent=0.80 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.scan-interval-seconds=60 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.node-attribute.fs-store.impl.class=org.apache.hadoop.yarn.server.resourcemanager.nodelabels.FileSystemNodeAttributeStore 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.retry.interval=500ms 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.best-effort=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled=*********(redacted) 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.posix.attr.uid.name=uidNumber 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.swebhdfs.impl=org.apache.hadoop.fs.SWebHdfs 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.elastic-memory-control.timeout-sec=5 21:48:34.640 pool-1-thread-1 DEBUG 
HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.ifile.readahead=true 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-timeline-store.ttl-interval-ms=300000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.reader.webapp.address=${yarn.timeline-service.webapp.address} 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.algorithm.pool-size=1 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.hbase.coprocessor.jar.hdfs.location=/hbase/coprocessor/hadoop-yarn-server-timelineservice.jar 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.encrypted.key.cache.num.refill.threads=2 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.scheduler.class=org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.command-opts=-Xmx1024m 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.cluster.local.dir=${hadoop.tmp.dir}/mapred/local 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.mapfile.bloom.error.rate=0.005 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.client.resolve.topology.enabled=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.allowed-runtimes=default 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.store.class=org.apache.hadoop.yarn.server.sharedcachemanager.store.InMemorySCMStore 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.failover-controller.graceful-fence.rpc-timeout.ms=5000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.replication=3 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.uid.cache.secs=14400 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.maxtaskfailures.per.tracker=3 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.metadatastore.impl=org.apache.hadoop.fs.s3a.s3guard.NullMetadataStore 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.skip.checksum.errors=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.file.impl=org.apache.spark.DebugFilesystem 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.app.mapreduce.client-am.ipc.max-retries-on-timeouts=3 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.xfs-filter.xframe-options=SAMEORIGIN 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.connection.timeout=200000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.max.split.locations=15 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.max-queue-length=15 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.session.timeout.ms=60000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.cache-ttl.secs=300 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jvm.system-properties-to-log=os.name,os.version,java.home,java.runtime.version,java.vendor,java.version,java.vm.name,java.class.path,java.io.tmpdir,user.dir,user.name 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.opportunistic-container-allocation.nodes-used=10 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.active-dir=/tmp/entity-file-history/active 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.transfer.buffer.size=131072 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.retry-interval-ms=1000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.http.policy=HTTP_ONLY 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.memory.debugFill=true 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.socket.send.buffer=8192 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.abfss.impl=org.apache.hadoop.fs.azurebfs.Abfss 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.uploader.server.address=0.0.0.0:8046 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation-token.max-conf-size-bytes=*********(redacted) 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.token.validity=*********(redacted) 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.max.connections=0 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.minicluster.yarn.nodemanager.resource.memory-mb=4096 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties 
to Hive Conf:mapreduce.job.emit-timeline-data=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.system-reserved-memory-mb=-1 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.kerberos.min.seconds.before.relogin=60 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.move.thread-count=3 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.admin.client.thread-count=1 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.dispatcher.drain-events.timeout=300000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.buffer.dir=${hadoop.tmp.dir}/s3a 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.enabled.protocols=TLSv1,SSLv2Hello,TLSv1.1,TLSv1.2 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.admin.address=0.0.0.0:10033 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation-status.time-out.ms=600000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.uris= 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.assumed.role.sts.endpoint.region=us-west-1 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.port=13562 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.max-log-aggregation-diagnostics-in-memory=10 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.health-checker.interval-ms=600000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.in.test=true 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.clientrm.interceptor-class.pipeline=org.apache.hadoop.yarn.server.router.clientrm.DefaultClientRequestInterceptor 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.zk-appid-node.split-index=0 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.blocksize=67108864 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.sandbox-mode.local-dirs.permissions=read 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.rmadmin.interceptor-class.pipeline=org.apache.hadoop.yarn.server.router.rmadmin.DefaultRMAdminRequestInterceptor 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-container-debug-info.enabled=true 21:48:34.640 pool-1-thread-1 
DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.max-cached-nodemanagers-proxies=0 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.cgroups.delete-delay-ms=20 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.delete.debug-delay-sec=0 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.pmem-check-enabled=true 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-health-checker.max-disk-utilization-per-disk-percentage=90.0 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.app-submission.cross-platform=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.work-preserving-recovery.scheduling-wait-ms=10000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-retry-minimum-interval-ms=1000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.cache.secs=300 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.enabled=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.local.sas.key.mode=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.maximum.data.length=67108864 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.max.threads=0 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.pipeline.cache-max-size=25 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.load-comparator=QUEUE_LENGTH 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.authorization=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.complete.cancel.delegation.tokens=*********(redacted) 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.ui.enabled=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.paging.maximum=5000 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:nfs.exports.allowed.hosts=* rw 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.ha.enable=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.http.policy=HTTP_ONLY 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.sharedcache.store.in-memory.check-period-mins=720 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.ssl=false 21:48:34.640 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.app.name=test-sql-context 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.rdbms.useLegacyNativeValueStrategy=true 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.application-client-protocol.poll-interval-ms=200 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.leveldb-store.compaction-interval-secs=86400 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.writer.class=org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.zookeeper.parent-znode=/hadoop-ha 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.server2.transport.mode=binary 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-aggregation.policy.class=org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AllContainerLogAggregationPolicy 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.merge.percent=0.66 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.sql.codegen.fallback=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.filter.group=(objectClass=group) 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.scheduler.pool-size=1 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.schema.verification=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resourcemanager.minimum.version=NONE 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.speculative-cap-running-tasks=0.1 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.admin.acl=* 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.recovery.supervised=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.admin.thread-count=1 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.ha.automatic-failover.enabled=true 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:mapreduce.reduce.skip.maxgroups=0 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.connect.timeout=180000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.address=${yarn.resourcemanager.hostname}:8032 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.ping=true 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.local-fs.write-limit.bytes=-1 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.adl.oauth2.access.token.provider.type=*********(redacted) 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.ssl.file.buffer.size=65536 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.ha.automatic-failover.embedded=true 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.gpu.docker-plugin=nvidia-docker-v1 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.enabled=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multipart.purge=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.store.class=file 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.queue-limit-stdev=1.0f 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.end-notification.max.attempts=5 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.output.fileoutputformat.compress.codec=org.apache.hadoop.io.compress.DefaultCodec 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-monitor.procfs-tree.smaps-based-rss.enabled=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.bind.wildcard.addr=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.rest-csrf.enabled=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.health-monitor.connect-retry-interval.ms=1000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.keytab=/etc/krb5.keytab 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.keytab=/etc/security/keytab/jhs.service.keytab 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.threads.max=10 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to 
Hive Conf:mapreduce.reduce.shuffle.input.buffer.percent=0.70 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.allowed-container-networks=host,none,bridge 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-labels.resync-interval-ms=120000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.tmp.dir=/tmp/hadoop-${user.name} 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.maps=2 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.rest-csrf.custom-header=X-XSRF-Header 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.end-notification.max.retry.interval=5000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation.retain-check-interval-seconds=-1 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.resource-tracker.client.thread-count=50 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.rm.system-metrics-publisher.emit-container-events=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-timeline-store.start-time-read-cache-size=10000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.ha.automatic-failover.zk-base-path=/yarn-leader-election 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.seqfile.local.dir=${hadoop.tmp.dir}/io/local 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.throttle.retry.interval=100ms 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.wasb.impl=org.apache.hadoop.fs.azure.Wasb 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.schema.validateConstraints=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.submit.file.replication=10 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.minicluster.fixed.ports=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multipart.threshold=2147483647 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.xfs-filter.xframe-options=SAMEORIGIN 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.done-dir=${yarn.app.mapreduce.am.staging-dir}/history/done 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:ipc.client.idlethreshold=4000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.cgroups.strict-resource-usage=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.input.buffer.percent=0.0 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.userremapping-gid-threshold=1 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.rest-csrf.enabled=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.host.port=21 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.ping.interval=60000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.end.function.listeners= 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.history-writer.multi-threaded-dispatcher.pool-size=10 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.admin.address=${yarn.resourcemanager.hostname}:8033 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.client-write-packet-size=65536 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.kill.max=10 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.speculative=true 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.key.default.bitlength=128 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reducer.unconditional-preempt.delay.sec=300 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-health-checker.interval-ms=120000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log.deletion-threads-count=4 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.webapp.filter-entity-list-by-user=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connection.maxidletime=10000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.io.sort.mb=100 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.localizer.client.thread-count=5 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.erasurecode.codec.rs.rawcoders=rs_native,rs_java 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.erasurecode.codec.rs-legacy.rawcoders=rs-legacy_java 21:48:34.641 
pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.admin.address=0.0.0.0:8047 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.algorithm.iterator=SERIAL 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.sql.testkey=true 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.connectionPoolingType=BONECP 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.localizer.cache.cleanup.interval-ms=600000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.crypto.codec.classes.aes.ctr.nopadding=org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec, org.apache.hadoop.crypto.JceAesCtrCryptoCodec 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.cache.limit.max-resources-mb=0 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.connection.ssl.enabled=true 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.process-kill-wait.ms=5000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.hdfs-servers=${fs.defaultFS} 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.workaround.non.threadsafe.getpwuid=true 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.df.interval=60000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multiobjectdelete.enable=true 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.cleaner.resource-sleep-ms=0 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-health-checker.min-healthy-disks=0.25 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.shell.missing.defaultFs.warning=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.file.buffer.size=65536 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.attr.member=member 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.random.device.file.path=/dev/urandom 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.sensitive-config-keys=*********(redacted) 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.ddb.max.retries=9 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:hadoop.rpc.socket.factory.class.default=org.apache.hadoop.net.StandardSocketFactory 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.intermediate-data-encryption.enable=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.connect.retry-interval.ms=30000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container.stderr.pattern={*stderr*,*STDERR*} 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.minimum-allocation-mb=1024 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.staging-dir=/tmp/hadoop-yarn/staging 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.read.timeout=180000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.max-age=1800 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.erasurecode.codec.xor.rawcoders=xor_native,xor_java 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.connection.establish.timeout=5000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.running.map.limit=0 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.minicluster.control-resource-monitoring=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.require.client.cert=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.kerberos.kinit.command=kinit 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.state-store.class=org.apache.hadoop.yarn.server.federation.store.impl.MemoryFederationStateStore 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.ui.showConsoleProgress=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.log.level=INFO 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.dns.log-slow-lookups.threshold.ms=1000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.ubertask.enable=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:adl.http.timeout=-1 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.placement-constraints.retry-attempts=3 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.caller.context.enabled=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.nodemanager.vmem-pmem-ratio=2.1 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.rpc.protection=authentication 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.health-monitor.rpc-timeout.ms=45000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.remote-app-log-dir=/tmp/logs 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.zk.timeout-ms=10000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.s3guard.cli.prune.age=86400000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.pcores-vcores-multiplier=1.0 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.sandbox-mode=disabled 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.containerlauncher.threadpool-initial-size=10 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.master.rest.enabled=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.threads=8 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.zk.retry-interval-ms=1000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.crypto.buffer.size=8192 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-labels.provider.fetch-interval-ms=600000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.recovery.store.leveldb.path=${hadoop.tmp.dir}/mapred/history/recoverystore 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.failover-retries-on-socket-timeouts=0 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.memory.enabled=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.authorization.caching.enable=true 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.instrumentation.requires.admin=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.delete.thread-count=4 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.finish-when-all-reducers-done=true 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.jaas.context=Client 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.timeline-service.leveldb-timeline-store.path=${hadoop.tmp.dir}/yarn/timeline 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.ConnectionUserName=APP 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.map.index.interval=128 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.max-queue-wait-time-ms=100 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.abfs.impl=org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.counters.max=120 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.rest-csrf.enabled=false 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.store-class=org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.move.interval-ms=180000 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.localizer.fetch.thread-count=4 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.scheduler.client.thread-count=50 21:48:34.641 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.hostname.verifier=DEFAULT 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-state-store.path=${hadoop.tmp.dir}/yarn/timeline 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.classloader=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.map.params=${mapreduce.task.profile.params} 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connect.timeout=20000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.auth_to_local.mechanism=hadoop 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.app-collector.linger-period.ms=60000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nm.liveness-monitor.expiry-interval-ms=600000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.reservation-system.planfollower.time-step=1000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.enable-userremapping.allowed=true 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.webapp.api-service.enable=false 
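The DEBUG lines above show HiveClientImpl copying every Hadoop, Spark, and extra configuration entry onto the Hive client's HiveConf, emitting one "Applying Hadoop/Hive/Spark and extra properties to Hive Conf:key=value" line per property (values for sensitive keys appear as *********(redacted)). The following is only a minimal Scala sketch of that kind of copy-and-log loop, not Spark's actual implementation; the method name applyToHiveConf, the sparkProps/extraProps parameters, and the println stand-in for the DEBUG logger are illustrative assumptions.

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hive.conf.HiveConf

// Hypothetical sketch: build a HiveConf and apply Hadoop properties plus
// Spark/extra overrides onto it, logging each applied pair in the same shape
// as the DEBUG lines in this log.
def applyToHiveConf(hadoopConf: Configuration,
                    sparkProps: Map[String, String],
                    extraProps: Map[String, String]): HiveConf = {
  val hiveConf = new HiveConf(classOf[HiveConf])

  // Configuration is Iterable over its key/value entries.
  val entries = hadoopConf.iterator()
  while (entries.hasNext) {
    val e = entries.next()
    println(s"Applying Hadoop/Hive/Spark and extra properties to Hive Conf:${e.getKey}=${e.getValue}")
    hiveConf.set(e.getKey, e.getValue)
  }

  // Spark settings and extra overrides are applied last, so they win.
  (sparkProps ++ extraProps).foreach { case (k, v) =>
    println(s"Applying Hadoop/Hive/Spark and extra properties to Hive Conf:$k=$v")
    hiveConf.set(k, v)
  }
  hiveConf
}

Because the pattern repeats once per property, a single HiveClientImpl initialization produces the several hundred DEBUG lines seen in this dump.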
21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.recovery.enabled=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.end-notification.retry.interval=1000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.du.interval=600000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.impl=org.apache.hadoop.fs.ftp.FTPFileSystem 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container.stderr.tail.bytes=4096 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.read.timeout.ms=60000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.cache.warn.after.ms=5000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.bytes-per-checksum=512 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.outputcommitter.factory.scheme.s3a=org.apache.hadoop.fs.s3a.commit.S3ACommitterFactory 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.groups.cache.background.reload=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-monitor.enabled=true 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.elastic-memory-control.enabled=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:net.topology.script.number.args=100 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.merge.progress.records=10000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.localizer.address=${yarn.nodemanager.hostname}:8040 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.keytab=/etc/krb5.keytab 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.fetch.retry.timeout-ms=30000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.rm.container-allocation.expiry-interval-ms=600000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.fileoutputcommitter.algorithm.version=2 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.work-preserving-recovery.enabled=true 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.skip.maxrecords=0 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.sharedcache.root-dir=/sharedcache 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.retry.throttle.limit=${fs.s3a.attempts.maximum} 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.server2.thrift.http.port=0 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.type=simple 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.cache.limit.max-resources=0 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.userlog.limit.kb=0 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.scheduler.monitor.enable=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connect.max.retries=10 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.retry.times=5 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-monitor.interval-ms=3000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.unsafe.exceptionOnMemoryLeak=true 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.gpu.allowed-gpu-devices=auto 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.executor.id=driver 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.sharedcache.mode=disabled 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.rest-csrf.custom-header=X-XSRF-Header 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.listen.queue.size=128 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.mutation.acl-policy.class=org.apache.hadoop.yarn.server.resourcemanager.scheduler.DefaultConfigurationMutationACLPolicy 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.cpu.vcores=1 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation.file-formats=TFile 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.fd-retain-secs=300 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.user.group.static.mapping.overrides=dr.who=; 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.schema.validateColumns=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.sas.expiry.period=90d 21:48:34.642 
pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.recovery.store.class=org.apache.hadoop.mapreduce.v2.hs.HistoryServerFileSystemStateStoreService 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.fail-fast=${yarn.fail-fast} 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.proxy-user-privileges.enabled=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.webapp.interceptor-class.pipeline=org.apache.hadoop.yarn.server.router.webapp.DefaultRequestInterceptorREST 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.memory.cgroups.soft-limit-percentage=90.0 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reducer.preempt.delay.sec=0 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.util.hash.type=murmur 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.storeManagerType=rdbms 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-validator=basic 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.client.job.max-retries=3 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.retry-delay.max.ms=60000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.connection.timeout.ms=60000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.params=-agentlib:hprof=cpu=samples,heap=sites,force=n,thread=y,verbose=n,file=%s 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.shuffle.log.backups=0 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.server2.thrift.port=0 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-diagnostics-maximum-size=10000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.retry.interval.ms=1000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.cgroups.delete-timeout-ms=1000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.file.impl=org.apache.hadoop.fs.local.LocalFs 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-aggregation.roll-monitoring-interval-seconds=-1 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:mapreduce.jobhistory.cleaner.interval-ms=86400000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.quorum=localhost:2181 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.output.fileoutputformat.compress=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.am-rm-tokens.master-key-rolling-interval-secs=*********(redacted) 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.assumed.role.session.duration=30m 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.conversion.rule=none 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.server.conf=ssl-server.xml 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.retry.throttle.interval=1000ms 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:seq.io.sort.factor=100 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.cleaner.initial-delay-mins=10 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.completion.pollinterval=5000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.keystores.factory.class=org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.resource.cpu-vcores=1 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.enabled=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.capabilities=CHOWN,DAC_OVERRIDE,FSETID,FOWNER,MKNOD,NET_RAW,SETGID,SETUID,SETFCAP,SETPCAP,NET_BIND_SERVICE,SYS_CHROOT,KILL,AUDIT_WRITE 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.acl.enable=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.done-dir=/tmp/entity-file-history/done/ 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.fs.state-store.uri=${hadoop.tmp.dir}/yarn/system/rmstore 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.always-scan-user-dir=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.opportunistic-containers-use-pause-for-preemption=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.nodemanager.linux-container-executor.nonsecure-mode.local-user=nobody 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.sql.optimizer.excludedRules=org.apache.spark.sql.catalyst.optimizer.ConvertToLocalRelation 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.reader.class=org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.configuration.provider-class=org.apache.hadoop.yarn.LocalConfigurationProvider 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.userremapping-uid-threshold=1 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.configuration.file-system-based-store=/yarn/conf 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.cache.limit.max-single-resource-mb=0 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.stop.grace-period=10 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.resource-profiles.source-file=resource-profiles.json 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.percentage-physical-cpu-limit=100 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.client.thread-count=10 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:tfile.fs.input.buffer.size=262144 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.progressmonitor.pollinterval=1000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-dirs=${yarn.log.dir}/userlogs 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.automatic.close=true 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.hostname=0.0.0.0 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.memory.cgroups.swappiness=0 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.stream-buffer-size=4096 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.fail-fast=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.app-aggregation-interval-secs=15 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.filter.user=(&(objectClass=user)(sAMAccountName={0})) 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: 
Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-localizer.log.level=INFO 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.address=${yarn.timeline-service.hostname}:10200 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.ubertask.maxmaps=9 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.threads.keepalivetime=60 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.files.preserve.failedtasks=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.client.job.retry-interval=2000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.failover-controller.graceful-fence.connection.retries=1 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation.token.max-lifetime=*********(redacted) 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.drain-entities.timeout.ms=2000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.fpga.vendor-plugin.class=org.apache.hadoop.yarn.server.nodemanager.containermanager.resourceplugin.fpga.IntelFpgaOpenclPlugin 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.summary-store=org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.cpu.vcores=1 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.encrypted-intermediate-data.buffer.kb=128 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.client.resolve.remote.symlinks=true 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.https.address=0.0.0.0:8044 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.allowed-origins=* 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.encrypted-intermediate-data=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.retain-seconds=604800 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.metrics.runtime.buckets=60,300,1440 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.timeline-service.generic-application-history.max-applications=10000 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.local-dirs=${hadoop.tmp.dir}/nm-local-dir 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.connection-keep-alive.enable=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.node-labels.configuration-type=centralized 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.path.style.access=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.aux-services.mapreduce_shuffle.class=org.apache.hadoop.mapred.ShuffleHandler 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.store.in-memory.staleness-period-mins=10080 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.adl.impl=org.apache.hadoop.fs.adl.AdlFileSystem 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nodemanager.minimum.version=NONE 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.xfs-filter.xframe-options=SAMEORIGIN 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.staging-dir.erasurecoding.enabled=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:net.topology.impl=org.apache.hadoop.net.NetworkTopology 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.map.index.skip=0 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.reader.webapp.https.address=${yarn.timeline-service.webapp.https.address} 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.ftp.data.connection.mode=ACTIVE_LOCAL_DATA_CONNECTION_MODE 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.local-fs.single-disk-limit.check.kill-limit-exceed=true 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.maximum-allocation-vcores=4 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.allowed-headers=X-Requested-With,Content-Type,Accept,Origin 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log-aggregation.compression-type=none 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.version=1.0f 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.ipc.rpc.class=org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying 
Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.maxattempts=4 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.dns.log-slow-lookups.enabled=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.committer.setup.cleanup.needed=true 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.master=local[2] 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.running.reduce.limit=0 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.maximum.response.length=134217728 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.token.tracking.ids.enabled=*********(redacted) 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.caller.context.max.size=128 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.host-pid-namespace.allowed=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.delayed-removal.allowed=false 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.system.acls=sasl:yarn@, sasl:mapred@, sasl:hdfs@ 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.recovery.dir=${hadoop.tmp.dir}/yarn-nm-recovery 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.fast.upload.buffer=disk 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.intermediate-done-dir=${yarn.app.mapreduce.am.staging-dir}/history/done_intermediate 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.shuffle.log.separate=true 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.max.total.tasks=5 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.readahead.range=64K 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.rawstore.impl=org.apache.hadoop.hive.metastore.ObjectStore 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.simple.anonymous.allowed=true 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.attempts.maximum=20 21:48:34.642 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.connection.timeout.ms=15000 21:48:34.643 pool-1-thread-1 DEBUG 
HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation-token-renewer.thread-count=*********(redacted) 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.health-checker.script.timeout-ms=1200000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-timeline-store.start-time-write-cache-size=10000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.resource-profiles.enabled=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.hbase-schema.prefix=prod. 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.authorization=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.log.level=INFO 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.decommissioning-nodes-watcher.poll-interval-secs=20 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.output.fileoutputformat.compress.type=RECORD 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.leveldb-state-store.path=${hadoop.tmp.dir}/yarn/system/rmstore 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.rest-csrf.custom-header=X-XSRF-Header 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.ifile.readahead.bytes=4194304 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.app-checker.class=org.apache.hadoop.yarn.server.sharedcachemanager.RemoteAppChecker 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.nonsecure-mode.limit-users=true 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.detect-hardware-capabilities=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.cluster.acls.enabled=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.retry-after-no-speculate=1000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.group.hierarchy.levels=0 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.fs.state-store.retry-interval-ms=1000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.stream-buffer-size=4096 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:yarn.resourcemanager.application-timeouts.monitor.interval-ms=3000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.output.compress.codec=org.apache.hadoop.io.compress.DefaultCodec 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.speculative=true 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.retry-after-speculate=15000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.connectionPool.maxPoolSize=10 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.cgroups.mount=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.container.log.backups=0 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.log.level=INFO 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reduce.slowstart.completedmaps=0.05 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.http-authentication.type=simple 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.ldap.search.attr.group.name=cn 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.fpga.allowed-fpga-devices=0,1 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.internal-timers-ttl-secs=420 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.logs.enabled=true 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.block.size=32M 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.client-server.address=0.0.0.0:8045 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.logaggregation.threadpool-size-max=100 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.hostname=0.0.0.0 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delegation.key.update-interval=86400000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.fetch.retry.enabled=${yarn.nodemanager.recovery.enabled} 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.memory.mb=-1 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.skip.start.attempts=2 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra 
properties to Hive Conf:fs.AbstractFileSystem.hdfs.impl=org.apache.hadoop.fs.Hdfs 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-health-checker.enable=true 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.cache.level2.type=none 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.tcpnodelay=true 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.rpc-timeout.ms=0 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.low-latency=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.input.lineinputformat.linespermap=1 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.interceptor.user.threadpool-size=5 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.ConnectionURL=*********(redacted) 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.schema.autoCreateAll=true 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connect.max.retries.on.timeouts=45 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.leveldb-timeline-store.read-cache-size=104857600 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.har.impl=org.apache.hadoop.fs.HarFs 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.split.metainfo.maxsize=10000000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.am.liveness-monitor.expiry-interval-ms=600000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.container-tokens.master-key-rolling-interval-secs=*********(redacted) 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.app-cache-size=10 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.socket.recv.buffer=8192 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.resource-tracker.address=${yarn.resourcemanager.hostname}:8031 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-labels.provider.fetch-timeout-ms=1200000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.heap.memory-mb.ratio=0.8 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying 
Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.leveldb-state-store.compaction-interval-secs=3600 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.rest-csrf.custom-header=X-XSRF-Header 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.configuration.fs.path=file://${hadoop.tmp.dir}/yarn/system/schedconf 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.output.filter=FAILED 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.filter.initializers=org.apache.hadoop.http.lib.StaticUserWebFilter 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.memory.mb=-1 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.hostname=0.0.0.0 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.ConnectionDriverName=org.apache.derby.jdbc.EmbeddedDriver 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:file.replication=1 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-metrics.unregister-delay-ms=10000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-metrics.period-ms=-1 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.fileoutputcommitter.task.cleanup.enabled=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.log.retain-seconds=10800 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.plugin.pluginRegistryBundleCheck=LOG 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.entity-group-fs-store.cleaner-interval-seconds=3600 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.keytab=/etc/krb5.keytab 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.metastore.event.listeners= 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping.providers.combined=true 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.merge.inmem.threshold=1000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.recovery.enabled=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.saskey.usecontainersaskeyforallaccess=true 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.nm.uploader.thread-count=20 
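The block of properties above includes the embedded metastore wiring for this test run: javax.jdo.option.ConnectionDriverName=org.apache.derby.jdbc.EmbeddedDriver, datanucleus.schema.autoCreateAll=true, and a redacted javax.jdo.option.ConnectionURL. As an illustration only (the URL and app name below are invented placeholders, not values from this log), the same keys can be handed to a Hive-enabled SparkSession through the spark.hadoop.* prefix; entries with that prefix are copied into the Hadoop Configuration, which HiveClientImpl then applies to its HiveConf, which is exactly what these DEBUG lines are reporting.

    import org.apache.spark.sql.SparkSession

    // Minimal sketch, not the suite's actual setup. spark.hadoop.* settings end up in the
    // Hadoop Configuration and are then applied to the HiveConf, as logged above.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("embedded-derby-metastore-sketch")            // hypothetical app name
      .enableHiveSupport()
      .config("spark.hadoop.javax.jdo.option.ConnectionDriverName",
        "org.apache.derby.jdbc.EmbeddedDriver")
      // Hypothetical in-memory Derby URL; the real ConnectionURL is redacted in the log.
      .config("spark.hadoop.javax.jdo.option.ConnectionURL",
        "jdbc:derby:memory:metastore_db;create=true")
      // Let DataNucleus create the metastore schema on first use, matching the dump above.
      .config("spark.hadoop.datanucleus.schema.autoCreateAll", "true")
      .getOrCreate()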
21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nodemanager-graceful-decommission-timeout-secs=3600 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.shuffle.ssl.enabled=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.hbase.coprocessor.app-final-value-retention-milliseconds=259200000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.staging.abort.pending.uploads=true 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.opportunistic-containers-max-queue-length=0 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.state-store.max-completed-applications=${yarn.resourcemanager.max-completed-applications} 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.ConnectionPassword=*********(redacted) 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.minimum-allowed-tasks=10 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.test.home=/home/jenkins/workspace/NewSparkPullRequestBuilder 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation.retain-seconds=-1 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.disk-health-checker.min-free-space-per-disk-mb=0 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.max-age-ms=604800000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.cross-origin.allowed-methods=GET,POST,HEAD 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.opportunistic-container-allocation.enabled=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.webapp.address=0.0.0.0:19888 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.system.tags=YARN,HDFS,NAMENODE,DATANODE,REQUIRED,SECURITY,KERBEROS,PERFORMANCE,CLIENT ,SERVER,DEBUG,DEPRECATED,COMMON,OPTIONAL 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.log-aggregation.file-controller.TFile.class=org.apache.hadoop.yarn.logaggregation.filecontroller.tfile.LogAggregationTFileController 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.nodemanager-connect.max-wait-ms=180000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.webapp.address=${yarn.resourcemanager.hostname}:8088 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive 
Conf:mapreduce.jobhistory.recovery.enable=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.parallelcopies=5 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.webhdfs.impl=org.apache.hadoop.fs.WebHdfs 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.trash.interval=0 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.client.max-retries=3 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.authentication=simple 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.profile.reduce.params=${mapreduce.task.profile.params} 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.resource.mb=1536 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.input.fileinputformat.list-status.num-threads=1 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.container-executor.class=org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:io.mapfile.bloom.size=1048576 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.ttl-ms=604800000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.min-queue-length=5 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.cpu-vcores=-1 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reduces=1 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multipart.size=100M 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.minimum-allocation-vcores=1 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.speculative.speculative-cap-total-tasks=0.01 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.ssl.client.conf=ssl-client.xml 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.queuename=default 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.encrypted-intermediate-data-key-size-bits=128 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.metadatastore.authoritative=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.xfs-filter.xframe-options=SAMEORIGIN 
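Several values above are recorded in their unexpanded form, for example mapreduce.task.profile.reduce.params=${mapreduce.task.profile.params} and, earlier, yarn.resourcemanager.webapp.address=${yarn.resourcemanager.hostname}:8088. Hadoop's Configuration stores such values verbatim and substitutes the ${...} references only when a value is read, so a dump like this shows the raw placeholders. A small sketch of that behaviour, using property names taken from the dump:

    import org.apache.hadoop.conf.Configuration

    // new Configuration(false) skips the *-default.xml resources, so only the two
    // properties set here are in play.
    val conf = new Configuration(false)
    conf.set("yarn.resourcemanager.hostname", "0.0.0.0")
    conf.set("yarn.resourcemanager.webapp.address", "${yarn.resourcemanager.hostname}:8088")

    // get() expands ${...} references; getRaw() returns the stored value untouched.
    println(conf.get("yarn.resourcemanager.webapp.address"))     // 0.0.0.0:8088
    println(conf.getRaw("yarn.resourcemanager.webapp.address"))  // ${yarn.resourcemanager.hostname}:8088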
21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.health-monitor.sleep-after-disconnect.ms=1000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.shuffle.log.limit.kb=0 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.group.mapping=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.application-client-protocol.poll-timeout-ms=-1 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.jhist.format=binary 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:datanucleus.rdbms.initializeColumnInfo=NONE 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.ha.enabled=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.staticuser.user=dr.who 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.exit.timeout.check-interval-ms=20000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.intermediate-user-done-dir.permissions=770 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.task.exit.timeout=60000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.linux-container-executor.resources-handler.class=org.apache.hadoop.yarn.server.nodemanager.util.DefaultLCEResourcesHandler 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.reduce.shuffle.memory.limit.percent=0.25 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.reservation-system.enable=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.output.compress=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.zookeeper.acl=world:anyone:rwcda 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.server.max.connections=0 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.runtime.linux.docker.default-container-network=host 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.router.webapp.address=0.0.0.0:8089 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.maximum-allocation-mb=8192 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.scheduler.monitor.policies=org.apache.hadoop.yarn.server.resourcemanager.monitor.capacity.ProportionalCapacityPreemptionPolicy 
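The *********(redacted) values scattered through this dump (javax.jdo.option.ConnectionURL, javax.jdo.option.ConnectionPassword, the delegation-token and master-key rolling intervals) are masked by Spark before logging: any key matching the configured redaction pattern (spark.redaction.regex) has its value replaced with a fixed placeholder. A rough sketch of the idea only, using a simplified pattern rather than Spark's exact default:

    import scala.util.matching.Regex

    // Simplified stand-in for spark.redaction.regex; not Spark's exact default pattern.
    val redactionPattern: Regex = "(?i)secret|password|token".r

    def redactValue(key: String, value: String): String =
      if (redactionPattern.findFirstIn(key).isDefined) "*********(redacted)" else value

    println(redactValue("javax.jdo.option.ConnectionPassword", "hunter2"))  // *********(redacted)
    println(redactValue("mapreduce.job.reduces", "1"))                      // 1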
21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.cleaner.period-mins=1440 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource-plugins.gpu.docker-plugin.nvidia-docker-v1.endpoint=http://localhost:3476/v1.0/docker/cli 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.am.container.log.limit.kb=0 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.client.connect.retry.interval=1000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.http-cross-origin.enabled=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.wasbs.impl=org.apache.hadoop.fs.azure.NativeAzureFileSystem$Secure 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.subcluster-resolver.class=org.apache.hadoop.yarn.server.federation.resolver.DefaultSubClusterResolverImpl 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.zk-state-store.parent-path=/rmstore 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.cleaner.enable=true 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.client.fd-flush-interval-secs=10 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.encrypted.key.cache.expiry=43200000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.nodemanager-client-async.thread-pool-max-size=500 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.map.maxattempts=4 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hive.exec.scratchdir=/home/jenkins/workspace/NewSparkPullRequestBuilder/target/tmp/spark-5af5b8f4-f1f4-4ef4-b2af-944add6f6c28 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.sorting-nodes-interval-ms=1000 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.staging.tmp.path=tmp/staging 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.sleep-delay-before-sigkill.ms=250 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.nm-container-queuing.min-queue-wait-time-ms=10 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.end-notification.retry.attempts=0 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.resource.count-logical-processors-as-cores=false 21:48:34.643 pool-1-thread-1 DEBUG 
HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.registry.zk.root=/registry 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:adl.feature.ownerandgroup.enableupn=false 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.zk-max-znode-size.bytes=1048576 21:48:34.643 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.reduce.shuffle.consumer.plugin.class=org.apache.hadoop.mapreduce.task.reduce.Shuffle 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.delayed.delegation-token.removal-interval-ms=*********(redacted) 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.localizer.cache.target-size-mb=10240 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.staging.conflict-mode=fail 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:javax.jdo.option.Multithreaded=true 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.client.libjars.wildcard=true 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.committer.staging.unique-filenames=true 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.node-attributes.provider.fetch-timeout-ms=1200000 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.list.version=2 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ftp.client-write-packet-size=65536 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.AbstractFileSystem.adl.impl=org.apache.hadoop.fs.adl.Adl 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.key.default.cipher=AES/CTR/NoPadding 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.failover-retries=0 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.multipart.purge.age=86400 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.local-fs.single-disk-limit.check.interval-ms=5000 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:net.topology.node.switch.mapping.impl=org.apache.hadoop.net.ScriptBasedMapping 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.amrmproxy.address=0.0.0.0:8049 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ipc.server.listen.queue.size=128 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:map.sort.class=org.apache.hadoop.util.QuickSort 21:48:34.644 
pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.viewfs.rename.strategy=SAME_MOUNTPOINT 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.authentication.retry-count=1 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.permissions.umask-mode=022 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.s3a.assumed.role.credentials.provider=org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.vmem-check-enabled=true 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.numa-awareness.enabled=false 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.recovery.compaction-interval-secs=3600 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.app.mapreduce.client-am.ipc.max-retries=3 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.federation.registry.base-dir=yarnfederation/ 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:spark.app.id=local-1593406028241 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.max.map=-1 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.local-fs.single-disk-limit.bytes=-1 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.job.ubertask.maxreduces=1 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.kms.client.encrypted.key.cache.size=500 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.security.java.secure.random.algorithm=SHA1PRNG 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:ha.failover-controller.cli-check.rpc-timeout.ms=20000 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.jobname.limit=50 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.client.nodemanager-connect.retry-interval-ms=10000 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.state-store-class=org.apache.hadoop.yarn.server.timeline.recovery.LeveldbTimelineStateStore 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.env-whitelist=JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_HOME,PATH,LANG,TZ 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.sharedcache.nested-level=3 21:48:34.644 pool-1-thread-1 DEBUG 
HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.rest-csrf.methods-to-ignore=GET,OPTIONS,HEAD 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:fs.azure.user.agent.prefix=unknown 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.zk-delegation-token-node.split-index=*********(redacted) 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.numa-awareness.read-topology=false 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.nodemanager.webapp.address=${yarn.nodemanager.hostname}:8042 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:rpc.metrics.quantile.enable=false 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.registry.class=org.apache.hadoop.registry.client.impl.FSRegistryOperationsService 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:mapreduce.jobhistory.admin.acl=* 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.system-metrics-publisher.dispatcher.pool-size=10 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.scheduler.queue-placement-rules=user-group 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:hadoop.http.authentication.kerberos.keytab=${user.home}/hadoop.keytab 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.resourcemanager.recovery.enabled=false 21:48:34.644 pool-1-thread-1 DEBUG HiveClientImpl: Applying Hadoop/Hive/Spark and extra properties to Hive Conf:yarn.timeline-service.webapp.rest-csrf.enabled=false 21:48:34.644 pool-1-thread-1 DEBUG SessionState: SessionState user: null 21:48:34.645 pool-1-thread-1 DEBUG Utilities: Create dirs /home/jenkins/workspace/NewSparkPullRequestBuilder/target
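With the properties applied and the Hive SessionState being initialized, the effective values can be read back on the driver side. A hedged sketch only, reusing the hypothetical spark session from the earlier snippet; keys set purely on the Hive side may not surface in sparkContext.hadoopConfiguration, but the two below come straight from the dump:

    // Illustrative only: read back a couple of the settings reported above from the
    // Hadoop Configuration that Spark carries on the driver.
    val hadoopConf = spark.sparkContext.hadoopConfiguration
    println(hadoopConf.get("hadoop.security.authentication"))  // "simple" in this run
    println(hadoopConf.get("fs.permissions.umask-mode"))       // "022" in this run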