20/07/01 11:33:05.760 main INFO CoarseGrainedExecutorBackend: Started daemon with process name: 208774@amp-jenkins-worker-04 20/07/01 11:33:05.769 main INFO SignalUtils: Registering signal handler for TERM 20/07/01 11:33:05.770 main INFO SignalUtils: Registering signal handler for HUP 20/07/01 11:33:05.771 main INFO SignalUtils: Registering signal handler for INT 20/07/01 11:33:06.181 main DEBUG MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginSuccess with annotation @org.apache.hadoop.metrics2.annotation.Metric(sampleName=Ops, always=false, valueName=Time, about=, interval=10, type=DEFAULT, value=[Rate of successful kerberos logins and latency (milliseconds)]) 20/07/01 11:33:06.186 main DEBUG MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginFailure with annotation @org.apache.hadoop.metrics2.annotation.Metric(sampleName=Ops, always=false, valueName=Time, about=, interval=10, type=DEFAULT, value=[Rate of failed kerberos logins and latency (milliseconds)]) 20/07/01 11:33:06.186 main DEBUG MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.getGroups with annotation @org.apache.hadoop.metrics2.annotation.Metric(sampleName=Ops, always=false, valueName=Time, about=, interval=10, type=DEFAULT, value=[GetGroups]) 20/07/01 11:33:06.187 main DEBUG MutableMetricsFactory: field private org.apache.hadoop.metrics2.lib.MutableGaugeLong org.apache.hadoop.security.UserGroupInformation$UgiMetrics.renewalFailuresTotal with annotation @org.apache.hadoop.metrics2.annotation.Metric(sampleName=Ops, always=false, valueName=Time, about=, interval=10, type=DEFAULT, value=[Renewal failures since startup]) 20/07/01 11:33:06.187 main DEBUG MutableMetricsFactory: field private org.apache.hadoop.metrics2.lib.MutableGaugeInt org.apache.hadoop.security.UserGroupInformation$UgiMetrics.renewalFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(sampleName=Ops, always=false, valueName=Time, about=, interval=10, type=DEFAULT, value=[Renewal failures since last successful login]) 20/07/01 11:33:06.189 main DEBUG MetricsSystemImpl: UgiMetrics, User and group related metrics 20/07/01 11:33:06.204 main DEBUG SecurityUtil: Setting hadoop.security.token.service.use_ip to true 20/07/01 11:33:06.212 main DEBUG Shell: Failed to detect a valid hadoop home directory java.io.FileNotFoundException: HADOOP_HOME and hadoop.home.dir are unset. at org.apache.hadoop.util.Shell.checkHadoopHomeInner(Shell.java:468) at org.apache.hadoop.util.Shell.checkHadoopHome(Shell.java:439) at org.apache.hadoop.util.Shell.(Shell.java:516) at org.apache.hadoop.util.StringUtils.(StringUtils.java:78) at org.apache.hadoop.conf.Configuration.getBoolean(Configuration.java:1664) at org.apache.hadoop.security.SecurityUtil.setConfigurationInternal(SecurityUtil.java:104) at org.apache.hadoop.security.SecurityUtil.(SecurityUtil.java:88) at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:316) at org.apache.hadoop.security.UserGroupInformation.setConfiguration(UserGroupInformation.java:365) at org.apache.spark.deploy.SparkHadoopUtil.(SparkHadoopUtil.scala:50) at org.apache.spark.deploy.SparkHadoopUtil$.instance$lzycompute(SparkHadoopUtil.scala:397) at org.apache.spark.deploy.SparkHadoopUtil$.instance(SparkHadoopUtil.scala:397) at org.apache.spark.deploy.SparkHadoopUtil$.get(SparkHadoopUtil.scala:418) at org.apache.spark.executor.CoarseGrainedExecutorBackend$.run(CoarseGrainedExecutorBackend.scala:321) at org.apache.spark.executor.CoarseGrainedExecutorBackend$.main(CoarseGrainedExecutorBackend.scala:310) at org.apache.spark.executor.CoarseGrainedExecutorBackend.main(CoarseGrainedExecutorBackend.scala) 20/07/01 11:33:06.229 main DEBUG Shell: setsid exited with exit code 0 20/07/01 11:33:06.249 main DEBUG Groups: Creating new Groups object 20/07/01 11:33:06.252 main DEBUG NativeCodeLoader: Trying to load the custom-built native-hadoop library... 20/07/01 11:33:06.252 main DEBUG NativeCodeLoader: Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: no hadoop in java.library.path 20/07/01 11:33:06.252 main DEBUG NativeCodeLoader: java.library.path=/usr/java/packages/lib/amd64:/usr/lib64:/lib64:/lib:/usr/lib 20/07/01 11:33:06.252 main WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable 20/07/01 11:33:06.253 main DEBUG PerformanceAdvisory: Falling back to shell based 20/07/01 11:33:06.255 main DEBUG JniBasedUnixGroupsMappingWithFallback: Group mapping impl=org.apache.hadoop.security.ShellBasedUnixGroupsMapping 20/07/01 11:33:06.357 main DEBUG Groups: Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=300000; warningDeltaMs=5000 20/07/01 11:33:06.360 main DEBUG SparkHadoopUtil: creating UGI for user: jenkins 20/07/01 11:33:06.367 main DEBUG UserGroupInformation: hadoop login 20/07/01 11:33:06.367 main DEBUG UserGroupInformation: hadoop login commit 20/07/01 11:33:06.369 main DEBUG UserGroupInformation: using local user:UnixPrincipal: jenkins 20/07/01 11:33:06.369 main DEBUG UserGroupInformation: Using user: "UnixPrincipal: jenkins" with name jenkins 20/07/01 11:33:06.369 main DEBUG UserGroupInformation: User entry: "jenkins" 20/07/01 11:33:06.370 main DEBUG UserGroupInformation: UGI loginUser:jenkins (auth:SIMPLE) 20/07/01 11:33:06.373 main DEBUG UserGroupInformation: PrivilegedAction as:jenkins (auth:SIMPLE) from:org.apache.spark.deploy.SparkHadoopUtil.runAsSparkUser(SparkHadoopUtil.scala:61) 20/07/01 11:33:06.388 main INFO SecurityManager: Changing view acls to: jenkins 20/07/01 11:33:06.389 main INFO SecurityManager: Changing modify acls to: jenkins 20/07/01 11:33:06.389 main INFO SecurityManager: Changing view acls groups to: 20/07/01 11:33:06.390 main INFO SecurityManager: Changing modify acls groups to: 20/07/01 11:33:06.390 main INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); groups with view permissions: Set(); users with modify permissions: Set(jenkins); groups with modify permissions: Set() 20/07/01 11:33:06.707 netty-rpc-connection-0 DEBUG TransportClientFactory: Creating new connection to amp-jenkins-worker-04.amp/192.168.10.24:35835 20/07/01 11:33:06.891 netty-rpc-connection-0 DEBUG TransportClientFactory: Connection to amp-jenkins-worker-04.amp/192.168.10.24:35835 successful, running bootstraps... 20/07/01 11:33:06.891 netty-rpc-connection-0 INFO TransportClientFactory: Successfully created connection to amp-jenkins-worker-04.amp/192.168.10.24:35835 after 121 ms (0 ms spent in bootstraps) 20/07/01 11:33:07.036 main INFO SecurityManager: Changing view acls to: jenkins 20/07/01 11:33:07.036 main INFO SecurityManager: Changing modify acls to: jenkins 20/07/01 11:33:07.037 main INFO SecurityManager: Changing view acls groups to: 20/07/01 11:33:07.037 main INFO SecurityManager: Changing modify acls groups to: 20/07/01 11:33:07.037 main INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(jenkins); groups with view permissions: Set(); users with modify permissions: Set(jenkins); groups with modify permissions: Set() 20/07/01 11:33:07.062 main DEBUG SparkEnv: Using serializer: class org.apache.spark.serializer.JavaSerializer 20/07/01 11:33:07.094 netty-rpc-connection-0 DEBUG TransportClientFactory: Creating new connection to amp-jenkins-worker-04.amp/192.168.10.24:35835 20/07/01 11:33:07.097 netty-rpc-connection-0 DEBUG TransportClientFactory: Connection to amp-jenkins-worker-04.amp/192.168.10.24:35835 successful, running bootstraps... 20/07/01 11:33:07.097 netty-rpc-connection-0 INFO TransportClientFactory: Successfully created connection to amp-jenkins-worker-04.amp/192.168.10.24:35835 after 2 ms (0 ms spent in bootstraps) 20/07/01 11:33:07.168 main INFO DiskBlockManager: Created local directory at /tmp/spark-c161bb4f-7487-48d2-bfb9-e261417dfa17/executor-a9c1de7e-75c1-4f10-9caf-a815379bb8c8/blockmgr-ade75c3e-f4ee-4a45-a067-c42cfdb3cc96 20/07/01 11:33:07.169 main DEBUG DiskBlockManager: Adding shutdown hook 20/07/01 11:33:07.171 main DEBUG ShutdownHookManager: Adding shutdown hook 20/07/01 11:33:07.219 main INFO MemoryStore: MemoryStore started with capacity 366.3 MiB 20/07/01 11:33:07.518 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Registering PWR handler. 20/07/01 11:33:07.519 dispatcher-Executor INFO SignalUtils: Registering signal handler for PWR 20/07/01 11:33:07.519 main INFO WorkerWatcher: Connecting to worker spark://Worker@192.168.10.24:34189 20/07/01 11:33:07.519 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Connecting to driver: spark://CoarseGrainedScheduler@amp-jenkins-worker-04.amp:35835 20/07/01 11:33:07.520 dispatcher-Executor DEBUG CoarseGrainedExecutorBackend: Resource profile id is: 0 20/07/01 11:33:07.520 netty-rpc-connection-1 DEBUG TransportClientFactory: Creating new connection to /192.168.10.24:34189 20/07/01 11:33:07.522 netty-rpc-connection-1 DEBUG TransportClientFactory: Connection to /192.168.10.24:34189 successful, running bootstraps... 20/07/01 11:33:07.522 netty-rpc-connection-1 INFO TransportClientFactory: Successfully created connection to /192.168.10.24:34189 after 1 ms (0 ms spent in bootstraps) 20/07/01 11:33:07.526 dispatcher-event-loop-0 INFO WorkerWatcher: Successfully connected to spark://Worker@192.168.10.24:34189 20/07/01 11:33:07.534 dispatcher-Executor INFO ResourceUtils: ============================================================== 20/07/01 11:33:07.535 dispatcher-Executor INFO ResourceUtils: No custom resources configured for spark.executor. 20/07/01 11:33:07.535 dispatcher-Executor INFO ResourceUtils: ============================================================== 20/07/01 11:33:07.563 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Successfully registered with driver 20/07/01 11:33:07.566 dispatcher-Executor INFO Executor: Starting executor ID 0 on host 192.168.10.24 20/07/01 11:33:07.658 dispatcher-Executor DEBUG TransportServer: Shuffle server started on port: 39609 20/07/01 11:33:07.660 dispatcher-Executor INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 39609. 20/07/01 11:33:07.660 dispatcher-Executor INFO NettyBlockTransferService: Server created on 192.168.10.24:39609 20/07/01 11:33:07.662 dispatcher-Executor INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy 20/07/01 11:33:07.672 dispatcher-Executor INFO BlockManagerMaster: Registering BlockManager BlockManagerId(0, 192.168.10.24, 39609, None) 20/07/01 11:33:07.695 dispatcher-Executor INFO BlockManagerMaster: Registered BlockManager BlockManagerId(0, 192.168.10.24, 39609, None) 20/07/01 11:33:07.695 dispatcher-Executor INFO BlockManager: Initialized BlockManager: BlockManagerId(0, 192.168.10.24, 39609, None) 20/07/01 11:33:07.775 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 0 20/07/01 11:33:07.791 Executor task launch worker for task 0 INFO Executor: Running task 0.0 in stage 0.0 (TID 0) 20/07/01 11:33:07.824 Executor task launch worker for task 0 INFO Executor: Fetching spark://amp-jenkins-worker-04.amp:35835/jars/testJar-1593628380091.jar with timestamp 1593628383340 20/07/01 11:33:07.851 Executor task launch worker for task 0 DEBUG TransportClientFactory: Creating new connection to amp-jenkins-worker-04.amp/192.168.10.24:35835 20/07/01 11:33:07.853 Executor task launch worker for task 0 DEBUG TransportClientFactory: Connection to amp-jenkins-worker-04.amp/192.168.10.24:35835 successful, running bootstraps... 20/07/01 11:33:07.853 Executor task launch worker for task 0 INFO TransportClientFactory: Successfully created connection to amp-jenkins-worker-04.amp/192.168.10.24:35835 after 2 ms (0 ms spent in bootstraps) 20/07/01 11:33:07.854 Executor task launch worker for task 0 DEBUG TransportClient: Sending stream request for /jars/testJar-1593628380091.jar to amp-jenkins-worker-04.amp/192.168.10.24:35835 20/07/01 11:33:07.856 Executor task launch worker for task 0 INFO Utils: Fetching spark://amp-jenkins-worker-04.amp:35835/jars/testJar-1593628380091.jar to /tmp/spark-c161bb4f-7487-48d2-bfb9-e261417dfa17/executor-a9c1de7e-75c1-4f10-9caf-a815379bb8c8/spark-61b66242-42c2-4ccf-8309-ed9acf357892/fetchFileTemp4491929243192754936.tmp 20/07/01 11:33:07.871 Executor task launch worker for task 0 INFO Utils: Copying /tmp/spark-c161bb4f-7487-48d2-bfb9-e261417dfa17/executor-a9c1de7e-75c1-4f10-9caf-a815379bb8c8/spark-61b66242-42c2-4ccf-8309-ed9acf357892/-19793296141593628383340_cache to /home/jenkins/workspace/NewSparkPullRequestBuilder/work/app-20200701113304-0000/0/./testJar-1593628380091.jar 20/07/01 11:33:07.890 Executor task launch worker for task 0 INFO Executor: Adding file:/home/jenkins/workspace/NewSparkPullRequestBuilder/work/app-20200701113304-0000/0/./testJar-1593628380091.jar to class loader 20/07/01 11:33:07.921 Executor task launch worker for task 0 DEBUG Executor: Task 0's epoch is 0 20/07/01 11:33:07.927 Executor task launch worker for task 0 DEBUG ExecutorMetricsPoller: stageTCMP: (0, 0) -> 1 20/07/01 11:33:07.987 Executor task launch worker for task 0 DEBUG BlockManager: Getting local block broadcast_0 20/07/01 11:33:07.988 Executor task launch worker for task 0 DEBUG BlockManager: Block broadcast_0 was not found 20/07/01 11:33:07.989 Executor task launch worker for task 0 INFO TorrentBroadcast: Started reading broadcast variable 0 with 1 pieces (estimated total size 4.0 MiB) 20/07/01 11:33:08.023 Executor task launch worker for task 0 DEBUG TorrentBroadcast: Reading piece broadcast_0_piece0 of broadcast_0 20/07/01 11:33:08.023 Executor task launch worker for task 0 DEBUG BlockManager: Getting local block broadcast_0_piece0 as bytes 20/07/01 11:33:08.025 Executor task launch worker for task 0 DEBUG BlockManager: Getting remote block broadcast_0_piece0 20/07/01 11:33:08.053 Executor task launch worker for task 0 DEBUG BlockManager: Getting remote block broadcast_0_piece0 from BlockManagerId(driver, amp-jenkins-worker-04.amp, 43457, None) 20/07/01 11:33:08.062 Executor task launch worker for task 0 DEBUG TransportClientFactory: Creating new connection to amp-jenkins-worker-04.amp/192.168.10.24:43457 20/07/01 11:33:08.064 Executor task launch worker for task 0 DEBUG TransportClientFactory: Connection to amp-jenkins-worker-04.amp/192.168.10.24:43457 successful, running bootstraps... 20/07/01 11:33:08.064 Executor task launch worker for task 0 INFO TransportClientFactory: Successfully created connection to amp-jenkins-worker-04.amp/192.168.10.24:43457 after 2 ms (0 ms spent in bootstraps) 20/07/01 11:33:08.087 shuffle-client-4-1 DEBUG TransportClient: Sending fetch chunk request 0 to amp-jenkins-worker-04.amp/192.168.10.24:43457 20/07/01 11:33:08.114 Executor task launch worker for task 0 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 2.4 KiB, free 366.3 MiB) 20/07/01 11:33:08.125 Executor task launch worker for task 0 DEBUG BlockManagerMaster: Updated info of block broadcast_0_piece0 20/07/01 11:33:08.125 Executor task launch worker for task 0 DEBUG BlockManager: Told master about block broadcast_0_piece0 20/07/01 11:33:08.126 Executor task launch worker for task 0 DEBUG BlockManager: Put block broadcast_0_piece0 locally took 14 ms 20/07/01 11:33:08.128 Executor task launch worker for task 0 DEBUG BlockManager: Putting block broadcast_0_piece0 without replication took 16 ms 20/07/01 11:33:08.130 Executor task launch worker for task 0 INFO TorrentBroadcast: Reading broadcast variable 0 took 140 ms 20/07/01 11:33:08.367 Executor task launch worker for task 0 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 4.2 KiB, free 366.3 MiB) 20/07/01 11:33:08.369 Executor task launch worker for task 0 DEBUG BlockManager: Put block broadcast_0 locally took 109 ms 20/07/01 11:33:08.369 Executor task launch worker for task 0 DEBUG BlockManager: Putting block broadcast_0 without replication took 109 ms 20/07/01 11:33:08.802 Executor task launch worker for task 0 INFO Executor: Finished task 0.0 in stage 0.0 (TID 0). 863 bytes result sent to driver 20/07/01 11:33:08.804 Executor task launch worker for task 0 DEBUG ExecutorMetricsPoller: removing (0, 0) from stageTCMP 20/07/01 11:33:08.812 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 1 20/07/01 11:33:08.813 Executor task launch worker for task 1 INFO Executor: Running task 1.0 in stage 0.0 (TID 1) 20/07/01 11:33:08.815 Executor task launch worker for task 1 DEBUG Executor: Task 1's epoch is 0 20/07/01 11:33:08.815 Executor task launch worker for task 1 DEBUG ExecutorMetricsPoller: stageTCMP: (0, 0) -> 1 20/07/01 11:33:08.824 Executor task launch worker for task 1 INFO Executor: Finished task 1.0 in stage 0.0 (TID 1). 824 bytes result sent to driver 20/07/01 11:33:08.825 Executor task launch worker for task 1 DEBUG ExecutorMetricsPoller: removing (0, 0) from stageTCMP 20/07/01 11:33:09.272 block-manager-slave-async-thread-pool-0 DEBUG BlockManagerSlaveEndpoint: removing broadcast 0 20/07/01 11:33:09.273 block-manager-slave-async-thread-pool-0 DEBUG BlockManager: Removing broadcast 0 20/07/01 11:33:09.280 block-manager-slave-async-thread-pool-0 DEBUG BlockManager: Removing block broadcast_0_piece0 20/07/01 11:33:09.281 block-manager-slave-async-thread-pool-0 DEBUG MemoryStore: Block broadcast_0_piece0 of size 2425 dropped from memory (free 384089052) 20/07/01 11:33:09.288 block-manager-slave-async-thread-pool-0 DEBUG BlockManagerMaster: Updated info of block broadcast_0_piece0 20/07/01 11:33:09.288 block-manager-slave-async-thread-pool-0 DEBUG BlockManager: Told master about block broadcast_0_piece0 20/07/01 11:33:09.289 block-manager-slave-async-thread-pool-0 DEBUG BlockManager: Removing block broadcast_0 20/07/01 11:33:09.289 block-manager-slave-async-thread-pool-0 DEBUG MemoryStore: Block broadcast_0 of size 4336 dropped from memory (free 384093388) 20/07/01 11:33:09.291 block-manager-slave-async-thread-pool-2 DEBUG BlockManagerSlaveEndpoint: Done removing broadcast 0, response is 0 20/07/01 11:33:09.294 block-manager-slave-async-thread-pool-2 DEBUG BlockManagerSlaveEndpoint: Sent response: 0 to amp-jenkins-worker-04.amp:35835 20/07/01 11:33:13.399 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 2 20/07/01 11:33:13.399 Executor task launch worker for task 2 INFO Executor: Running task 0.0 in stage 1.0 (TID 2) 20/07/01 11:33:13.403 Executor task launch worker for task 2 DEBUG Executor: Task 2's epoch is 0 20/07/01 11:33:13.404 Executor task launch worker for task 2 DEBUG ExecutorMetricsPoller: stageTCMP: (1, 0) -> 1 20/07/01 11:33:13.406 Executor task launch worker for task 2 DEBUG BlockManager: Getting local block broadcast_1 20/07/01 11:33:13.406 Executor task launch worker for task 2 DEBUG BlockManager: Block broadcast_1 was not found 20/07/01 11:33:13.407 Executor task launch worker for task 2 INFO TorrentBroadcast: Started reading broadcast variable 1 with 1 pieces (estimated total size 4.0 MiB) 20/07/01 11:33:13.407 Executor task launch worker for task 2 DEBUG TorrentBroadcast: Reading piece broadcast_1_piece0 of broadcast_1 20/07/01 11:33:13.407 Executor task launch worker for task 2 DEBUG BlockManager: Getting local block broadcast_1_piece0 as bytes 20/07/01 11:33:13.407 Executor task launch worker for task 2 DEBUG BlockManager: Getting remote block broadcast_1_piece0 20/07/01 11:33:13.411 Executor task launch worker for task 2 DEBUG BlockManager: Getting remote block broadcast_1_piece0 from BlockManagerId(driver, amp-jenkins-worker-04.amp, 43457, None) 20/07/01 11:33:13.413 shuffle-client-4-1 DEBUG TransportClient: Sending fetch chunk request 0 to amp-jenkins-worker-04.amp/192.168.10.24:43457 20/07/01 11:33:13.416 Executor task launch worker for task 2 INFO MemoryStore: Block broadcast_1_piece0 stored as bytes in memory (estimated size 11.0 KiB, free 366.3 MiB) 20/07/01 11:33:13.424 Executor task launch worker for task 2 DEBUG BlockManagerMaster: Updated info of block broadcast_1_piece0 20/07/01 11:33:13.424 Executor task launch worker for task 2 DEBUG BlockManager: Told master about block broadcast_1_piece0 20/07/01 11:33:13.424 Executor task launch worker for task 2 DEBUG BlockManager: Put block broadcast_1_piece0 locally took 9 ms 20/07/01 11:33:13.424 Executor task launch worker for task 2 DEBUG BlockManager: Putting block broadcast_1_piece0 without replication took 9 ms 20/07/01 11:33:13.424 Executor task launch worker for task 2 INFO TorrentBroadcast: Reading broadcast variable 1 took 17 ms 20/07/01 11:33:13.428 Executor task launch worker for task 2 INFO MemoryStore: Block broadcast_1 stored as values in memory (estimated size 24.1 KiB, free 366.3 MiB) 20/07/01 11:33:13.429 Executor task launch worker for task 2 DEBUG BlockManager: Put block broadcast_1 locally took 2 ms 20/07/01 11:33:13.429 Executor task launch worker for task 2 DEBUG BlockManager: Putting block broadcast_1 without replication took 2 ms 20/07/01 11:33:13.844 Executor task launch worker for task 2 DEBUG CodeGenerator: /* 001 */ public Object generate(Object[] references) { /* 002 */ return new GeneratedIteratorForCodegenStage1(references); /* 003 */ } /* 004 */ /* 005 */ // codegenStageId=1 /* 006 */ final class GeneratedIteratorForCodegenStage1 extends org.apache.spark.sql.execution.BufferedRowIterator { /* 007 */ private Object[] references; /* 008 */ private scala.collection.Iterator[] inputs; /* 009 */ private boolean agg_initAgg_0; /* 010 */ private org.apache.spark.unsafe.KVIterator agg_mapIter_0; /* 011 */ private org.apache.spark.sql.execution.UnsafeFixedWidthAggregationMap agg_hashMap_0; /* 012 */ private org.apache.spark.sql.execution.UnsafeKVExternalSorter agg_sorter_0; /* 013 */ private boolean range_initRange_0; /* 014 */ private long range_nextIndex_0; /* 015 */ private TaskContext range_taskContext_0; /* 016 */ private InputMetrics range_inputMetrics_0; /* 017 */ private long range_batchEnd_0; /* 018 */ private long range_numElementsTodo_0; /* 019 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] range_mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[5]; /* 020 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] agg_mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[2]; /* 021 */ /* 022 */ public GeneratedIteratorForCodegenStage1(Object[] references) { /* 023 */ this.references = references; /* 024 */ } /* 025 */ /* 026 */ public void init(int index, scala.collection.Iterator[] inputs) { /* 027 */ partitionIndex = index; /* 028 */ this.inputs = inputs; /* 029 */ /* 030 */ range_taskContext_0 = TaskContext.get(); /* 031 */ range_inputMetrics_0 = range_taskContext_0.taskMetrics().inputMetrics(); /* 032 */ range_mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 033 */ range_mutableStateArray_0[1] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 034 */ range_mutableStateArray_0[2] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 035 */ range_mutableStateArray_0[3] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 036 */ agg_mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(range_mutableStateArray_0[3], 4); /* 037 */ range_mutableStateArray_0[4] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32); /* 038 */ agg_mutableStateArray_0[1] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(range_mutableStateArray_0[4], 4); /* 039 */ /* 040 */ } /* 041 */ /* 042 */ private void agg_doAggregateWithKeysOutput_0(UnsafeRow agg_keyTerm_0, UnsafeRow agg_bufferTerm_0) /* 043 */ throws java.io.IOException { /* 044 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[5] /* numOutputRows */).add(1); /* 045 */ /* 046 */ ArrayData agg_value_8 = agg_keyTerm_0.getArray(0); /* 047 */ long agg_value_9 = agg_bufferTerm_0.getLong(0); /* 048 */ /* 049 */ range_mutableStateArray_0[4].reset(); /* 050 */ /* 051 */ // Remember the current cursor so that we can calculate how many bytes are /* 052 */ // written later. /* 053 */ final int agg_previousCursor_1 = range_mutableStateArray_0[4].cursor(); /* 054 */ /* 055 */ final ArrayData agg_tmpInput_1 = agg_value_8; /* 056 */ if (agg_tmpInput_1 instanceof UnsafeArrayData) { /* 057 */ range_mutableStateArray_0[4].write((UnsafeArrayData) agg_tmpInput_1); /* 058 */ } else { /* 059 */ final int agg_numElements_1 = agg_tmpInput_1.numElements(); /* 060 */ agg_mutableStateArray_0[1].initialize(agg_numElements_1); /* 061 */ /* 062 */ for (int agg_index_1 = 0; agg_index_1 < agg_numElements_1; agg_index_1++) { /* 063 */ if (agg_tmpInput_1.isNullAt(agg_index_1)) { /* 064 */ agg_mutableStateArray_0[1].setNull4Bytes(agg_index_1); /* 065 */ } else { /* 066 */ agg_mutableStateArray_0[1].write(agg_index_1, agg_tmpInput_1.getInt(agg_index_1)); /* 067 */ } /* 068 */ /* 069 */ } /* 070 */ } /* 071 */ /* 072 */ range_mutableStateArray_0[4].setOffsetAndSizeFromPreviousCursor(0, agg_previousCursor_1); /* 073 */ /* 074 */ range_mutableStateArray_0[4].write(1, agg_value_9); /* 075 */ append((range_mutableStateArray_0[4].getRow())); /* 076 */ /* 077 */ } /* 078 */ /* 079 */ private void initRange(int idx) { /* 080 */ java.math.BigInteger index = java.math.BigInteger.valueOf(idx); /* 081 */ java.math.BigInteger numSlice = java.math.BigInteger.valueOf(2L); /* 082 */ java.math.BigInteger numElement = java.math.BigInteger.valueOf(71773L); /* 083 */ java.math.BigInteger step = java.math.BigInteger.valueOf(1L); /* 084 */ java.math.BigInteger start = java.math.BigInteger.valueOf(0L); /* 085 */ long partitionEnd; /* 086 */ /* 087 */ java.math.BigInteger st = index.multiply(numElement).divide(numSlice).multiply(step).add(start); /* 088 */ if (st.compareTo(java.math.BigInteger.valueOf(Long.MAX_VALUE)) > 0) { /* 089 */ range_nextIndex_0 = Long.MAX_VALUE; /* 090 */ } else if (st.compareTo(java.math.BigInteger.valueOf(Long.MIN_VALUE)) < 0) { /* 091 */ range_nextIndex_0 = Long.MIN_VALUE; /* 092 */ } else { /* 093 */ range_nextIndex_0 = st.longValue(); /* 094 */ } /* 095 */ range_batchEnd_0 = range_nextIndex_0; /* 096 */ /* 097 */ java.math.BigInteger end = index.add(java.math.BigInteger.ONE).multiply(numElement).divide(numSlice) /* 098 */ .multiply(step).add(start); /* 099 */ if (end.compareTo(java.math.BigInteger.valueOf(Long.MAX_VALUE)) > 0) { /* 100 */ partitionEnd = Long.MAX_VALUE; /* 101 */ } else if (end.compareTo(java.math.BigInteger.valueOf(Long.MIN_VALUE)) < 0) { /* 102 */ partitionEnd = Long.MIN_VALUE; /* 103 */ } else { /* 104 */ partitionEnd = end.longValue(); /* 105 */ } /* 106 */ /* 107 */ java.math.BigInteger startToEnd = java.math.BigInteger.valueOf(partitionEnd).subtract( /* 108 */ java.math.BigInteger.valueOf(range_nextIndex_0)); /* 109 */ range_numElementsTodo_0 = startToEnd.divide(step).longValue(); /* 110 */ if (range_numElementsTodo_0 < 0) { /* 111 */ range_numElementsTodo_0 = 0; /* 112 */ } else if (startToEnd.remainder(step).compareTo(java.math.BigInteger.valueOf(0L)) != 0) { /* 113 */ range_numElementsTodo_0++; /* 114 */ } /* 115 */ } /* 116 */ /* 117 */ private void agg_doConsume_0(int agg_expr_0_0, boolean agg_exprIsNull_0_0) throws java.io.IOException { /* 118 */ UnsafeRow agg_unsafeRowAggBuffer_0 = null; /* 119 */ /* 120 */ // generate grouping key /* 121 */ range_mutableStateArray_0[3].reset(); /* 122 */ /* 123 */ ArrayData agg_arrayData_0 = ArrayData.allocateArrayData( /* 124 */ 4, 1L, " createArray failed."); /* 125 */ /* 126 */ if (agg_exprIsNull_0_0) { /* 127 */ agg_arrayData_0.setNullAt(0); /* 128 */ } else { /* 129 */ agg_arrayData_0.setInt(0, agg_expr_0_0); /* 130 */ } /* 131 */ // Remember the current cursor so that we can calculate how many bytes are /* 132 */ // written later. /* 133 */ final int agg_previousCursor_0 = range_mutableStateArray_0[3].cursor(); /* 134 */ /* 135 */ final ArrayData agg_tmpInput_0 = agg_arrayData_0; /* 136 */ if (agg_tmpInput_0 instanceof UnsafeArrayData) { /* 137 */ range_mutableStateArray_0[3].write((UnsafeArrayData) agg_tmpInput_0); /* 138 */ } else { /* 139 */ final int agg_numElements_0 = agg_tmpInput_0.numElements(); /* 140 */ agg_mutableStateArray_0[0].initialize(agg_numElements_0); /* 141 */ /* 142 */ for (int agg_index_0 = 0; agg_index_0 < agg_numElements_0; agg_index_0++) { /* 143 */ if (agg_tmpInput_0.isNullAt(agg_index_0)) { /* 144 */ agg_mutableStateArray_0[0].setNull4Bytes(agg_index_0); /* 145 */ } else { /* 146 */ agg_mutableStateArray_0[0].write(agg_index_0, agg_tmpInput_0.getInt(agg_index_0)); /* 147 */ } /* 148 */ /* 149 */ } /* 150 */ } /* 151 */ /* 152 */ range_mutableStateArray_0[3].setOffsetAndSizeFromPreviousCursor(0, agg_previousCursor_0); /* 153 */ int agg_unsafeRowKeyHash_0 = (range_mutableStateArray_0[3].getRow()).hashCode(); /* 154 */ if (true) { /* 155 */ // try to get the buffer from hash map /* 156 */ agg_unsafeRowAggBuffer_0 = /* 157 */ agg_hashMap_0.getAggregationBufferFromUnsafeRow((range_mutableStateArray_0[3].getRow()), agg_unsafeRowKeyHash_0); /* 158 */ } /* 159 */ // Can't allocate buffer from the hash map. Spill the map and fallback to sort-based /* 160 */ // aggregation after processing all input rows. /* 161 */ if (agg_unsafeRowAggBuffer_0 == null) { /* 162 */ if (agg_sorter_0 == null) { /* 163 */ agg_sorter_0 = agg_hashMap_0.destructAndCreateExternalSorter(); /* 164 */ } else { /* 165 */ agg_sorter_0.merge(agg_hashMap_0.destructAndCreateExternalSorter()); /* 166 */ } /* 167 */ /* 168 */ // the hash map had be spilled, it should have enough memory now, /* 169 */ // try to allocate buffer again. /* 170 */ agg_unsafeRowAggBuffer_0 = agg_hashMap_0.getAggregationBufferFromUnsafeRow( /* 171 */ (range_mutableStateArray_0[3].getRow()), agg_unsafeRowKeyHash_0); /* 172 */ if (agg_unsafeRowAggBuffer_0 == null) { /* 173 */ // failed to allocate the first page /* 174 */ throw new org.apache.spark.memory.SparkOutOfMemoryError("No enough memory for aggregation"); /* 175 */ } /* 176 */ } /* 177 */ /* 178 */ // common sub-expressions /* 179 */ /* 180 */ // evaluate aggregate functions and update aggregation buffers /* 181 */ /* 182 */ long agg_value_6 = agg_unsafeRowAggBuffer_0.getLong(0); /* 183 */ /* 184 */ long agg_value_5 = -1L; /* 185 */ /* 186 */ agg_value_5 = agg_value_6 + 1L; /* 187 */ /* 188 */ agg_unsafeRowAggBuffer_0.setLong(0, agg_value_5); /* 189 */ /* 190 */ } /* 191 */ /* 192 */ private void agg_doAggregateWithKeys_0() throws java.io.IOException { /* 193 */ // initialize Range /* 194 */ if (!range_initRange_0) { /* 195 */ range_initRange_0 = true; /* 196 */ initRange(partitionIndex); /* 197 */ } /* 198 */ /* 199 */ while (true) { /* 200 */ if (range_nextIndex_0 == range_batchEnd_0) { /* 201 */ long range_nextBatchTodo_0; /* 202 */ if (range_numElementsTodo_0 > 1000L) { /* 203 */ range_nextBatchTodo_0 = 1000L; /* 204 */ range_numElementsTodo_0 -= 1000L; /* 205 */ } else { /* 206 */ range_nextBatchTodo_0 = range_numElementsTodo_0; /* 207 */ range_numElementsTodo_0 = 0; /* 208 */ if (range_nextBatchTodo_0 == 0) break; /* 209 */ } /* 210 */ range_batchEnd_0 += range_nextBatchTodo_0 * 1L; /* 211 */ } /* 212 */ /* 213 */ int range_localEnd_0 = (int)((range_batchEnd_0 - range_nextIndex_0) / 1L); /* 214 */ for (int range_localIdx_0 = 0; range_localIdx_0 < range_localEnd_0; range_localIdx_0++) { /* 215 */ long range_value_0 = ((long)range_localIdx_0 * 1L) + range_nextIndex_0; /* 216 */ /* 217 */ boolean project_isNull_1 = false; /* 218 */ long project_value_1 = -1L; /* 219 */ if (10L == 0) { /* 220 */ project_isNull_1 = true; /* 221 */ } else { /* 222 */ project_value_1 = (long)(range_value_0 % 10L); /* 223 */ } /* 224 */ boolean project_isNull_0 = project_isNull_1; /* 225 */ int project_value_0 = -1; /* 226 */ if (!project_isNull_1) { /* 227 */ project_value_0 = (int) project_value_1; /* 228 */ } /* 229 */ /* 230 */ agg_doConsume_0(project_value_0, project_isNull_0); /* 231 */ /* 232 */ // shouldStop check is eliminated /* 233 */ } /* 234 */ range_nextIndex_0 = range_batchEnd_0; /* 235 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[4] /* numOutputRows */).add(range_localEnd_0); /* 236 */ range_inputMetrics_0.incRecordsRead(range_localEnd_0); /* 237 */ range_taskContext_0.killTaskIfInterrupted(); /* 238 */ } /* 239 */ /* 240 */ agg_mapIter_0 = ((org.apache.spark.sql.execution.aggregate.HashAggregateExec) references[0] /* plan */).finishAggregate(agg_hashMap_0, agg_sorter_0, ((org.apache.spark.sql.execution.metric.SQLMetric) references[1] /* peakMemory */), ((org.apache.spark.sql.execution.metric.SQLMetric) references[2] /* spillSize */), ((org.apache.spark.sql.execution.metric.SQLMetric) references[3] /* avgHashProbe */)); /* 241 */ } /* 242 */ /* 243 */ protected void processNext() throws java.io.IOException { /* 244 */ if (!agg_initAgg_0) { /* 245 */ agg_initAgg_0 = true; /* 246 */ /* 247 */ agg_hashMap_0 = ((org.apache.spark.sql.execution.aggregate.HashAggregateExec) references[0] /* plan */).createHashMap(); /* 248 */ long wholestagecodegen_beforeAgg_0 = System.nanoTime(); /* 249 */ agg_doAggregateWithKeys_0(); /* 250 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[6] /* aggTime */).add((System.nanoTime() - wholestagecodegen_beforeAgg_0) / 1000000); /* 251 */ } /* 252 */ // output the result /* 253 */ /* 254 */ while ( agg_mapIter_0.next()) { /* 255 */ UnsafeRow agg_aggKey_0 = (UnsafeRow) agg_mapIter_0.getKey(); /* 256 */ UnsafeRow agg_aggBuffer_0 = (UnsafeRow) agg_mapIter_0.getValue(); /* 257 */ agg_doAggregateWithKeysOutput_0(agg_aggKey_0, agg_aggBuffer_0); /* 258 */ if (shouldStop()) return; /* 259 */ } /* 260 */ agg_mapIter_0.close(); /* 261 */ if (agg_sorter_0 == null) { /* 262 */ agg_hashMap_0.free(); /* 263 */ } /* 264 */ } /* 265 */ /* 266 */ } 20/07/01 11:33:14.185 Executor task launch worker for task 2 INFO CodeGenerator: Code generated in 415.247445 ms 20/07/01 11:33:14.314 Executor task launch worker for task 2 DEBUG GenerateUnsafeProjection: code for pmod(hash(input[0, array, false], 42), 200): /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = false; /* 032 */ int value_0 = -1; /* 033 */ if (200 == 0) { /* 034 */ isNull_0 = true; /* 035 */ } else { /* 036 */ int value_1 = 42; /* 037 */ ArrayData value_2 = i.getArray(0); /* 038 */ /* 039 */ for (int index_0 = 0; index_0 < value_2.numElements(); index_0++) { /* 040 */ /* 041 */ if (!value_2.isNullAt(index_0)) { /* 042 */ /* 043 */ final int element_0 = value_2.getInt(index_0); /* 044 */ value_1 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(element_0, value_1); /* 045 */ /* 046 */ } /* 047 */ /* 048 */ } /* 049 */ /* 050 */ int remainder_0 = value_1 % 200; /* 051 */ if (remainder_0 < 0) { /* 052 */ value_0=(remainder_0 + 200) % 200; /* 053 */ } else { /* 054 */ value_0=remainder_0; /* 055 */ } /* 056 */ /* 057 */ } /* 058 */ if (isNull_0) { /* 059 */ mutableStateArray_0[0].setNullAt(0); /* 060 */ } else { /* 061 */ mutableStateArray_0[0].write(0, value_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:14.318 Executor task launch worker for task 2 DEBUG CodeGenerator: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = false; /* 032 */ int value_0 = -1; /* 033 */ if (200 == 0) { /* 034 */ isNull_0 = true; /* 035 */ } else { /* 036 */ int value_1 = 42; /* 037 */ ArrayData value_2 = i.getArray(0); /* 038 */ /* 039 */ for (int index_0 = 0; index_0 < value_2.numElements(); index_0++) { /* 040 */ /* 041 */ if (!value_2.isNullAt(index_0)) { /* 042 */ /* 043 */ final int element_0 = value_2.getInt(index_0); /* 044 */ value_1 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(element_0, value_1); /* 045 */ /* 046 */ } /* 047 */ /* 048 */ } /* 049 */ /* 050 */ int remainder_0 = value_1 % 200; /* 051 */ if (remainder_0 < 0) { /* 052 */ value_0=(remainder_0 + 200) % 200; /* 053 */ } else { /* 054 */ value_0=remainder_0; /* 055 */ } /* 056 */ /* 057 */ } /* 058 */ if (isNull_0) { /* 059 */ mutableStateArray_0[0].setNullAt(0); /* 060 */ } else { /* 061 */ mutableStateArray_0[0].write(0, value_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:14.341 Executor task launch worker for task 2 INFO CodeGenerator: Code generated in 26.800994 ms 20/07/01 11:33:14.357 Executor task launch worker for task 2 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:14.359 Executor task launch worker for task 2 DEBUG CodeGenerator: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:14.369 Executor task launch worker for task 2 INFO CodeGenerator: Code generated in 11.126572 ms 20/07/01 11:33:14.378 Executor task launch worker for task 2 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:14.395 Executor task launch worker for task 2 DEBUG CodeGenerator: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:14.410 Executor task launch worker for task 2 INFO CodeGenerator: Code generated in 32.356204 ms 20/07/01 11:33:14.418 Executor task launch worker for task 2 DEBUG TaskMemoryManager: Task 2 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@4cfe1aa3 20/07/01 11:33:14.426 Executor task launch worker for task 2 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:14.428 Executor task launch worker for task 2 DEBUG CodeGenerator: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:14.438 Executor task launch worker for task 2 INFO CodeGenerator: Code generated in 11.504649 ms 20/07/01 11:33:14.443 Executor task launch worker for task 2 DEBUG TaskMemoryManager: Task 2 acquired 16.0 MiB for org.apache.spark.unsafe.map.BytesToBytesMap@4cfe1aa3 20/07/01 11:33:14.475 Executor task launch worker for task 2 DEBUG TaskMemoryManager: Task 2 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@4cfe1aa3 20/07/01 11:33:14.502 Executor task launch worker for task 2 DEBUG TaskMemoryManager: Task 2 release 16.0 MiB from org.apache.spark.unsafe.map.BytesToBytesMap@4cfe1aa3 20/07/01 11:33:14.522 Executor task launch worker for task 2 INFO Executor: Finished task 0.0 in stage 1.0 (TID 2). 2401 bytes result sent to driver 20/07/01 11:33:14.523 Executor task launch worker for task 2 DEBUG ExecutorMetricsPoller: removing (1, 0) from stageTCMP 20/07/01 11:33:14.530 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 3 20/07/01 11:33:14.531 Executor task launch worker for task 3 INFO Executor: Running task 1.0 in stage 1.0 (TID 3) 20/07/01 11:33:14.532 Executor task launch worker for task 3 DEBUG Executor: Task 3's epoch is 0 20/07/01 11:33:14.532 Executor task launch worker for task 3 DEBUG ExecutorMetricsPoller: stageTCMP: (1, 0) -> 1 20/07/01 11:33:14.547 Executor task launch worker for task 3 DEBUG GenerateUnsafeProjection: code for pmod(hash(input[0, array, false], 42), 200): /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = false; /* 032 */ int value_0 = -1; /* 033 */ if (200 == 0) { /* 034 */ isNull_0 = true; /* 035 */ } else { /* 036 */ int value_1 = 42; /* 037 */ ArrayData value_2 = i.getArray(0); /* 038 */ /* 039 */ for (int index_0 = 0; index_0 < value_2.numElements(); index_0++) { /* 040 */ /* 041 */ if (!value_2.isNullAt(index_0)) { /* 042 */ /* 043 */ final int element_0 = value_2.getInt(index_0); /* 044 */ value_1 = org.apache.spark.unsafe.hash.Murmur3_x86_32.hashInt(element_0, value_1); /* 045 */ /* 046 */ } /* 047 */ /* 048 */ } /* 049 */ /* 050 */ int remainder_0 = value_1 % 200; /* 051 */ if (remainder_0 < 0) { /* 052 */ value_0=(remainder_0 + 200) % 200; /* 053 */ } else { /* 054 */ value_0=remainder_0; /* 055 */ } /* 056 */ /* 057 */ } /* 058 */ if (isNull_0) { /* 059 */ mutableStateArray_0[0].setNullAt(0); /* 060 */ } else { /* 061 */ mutableStateArray_0[0].write(0, value_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:14.550 Executor task launch worker for task 3 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:14.554 Executor task launch worker for task 3 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:14.554 Executor task launch worker for task 3 DEBUG TaskMemoryManager: Task 3 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@74cade 20/07/01 11:33:14.558 Executor task launch worker for task 3 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:14.559 Executor task launch worker for task 3 DEBUG TaskMemoryManager: Task 3 acquired 16.0 MiB for org.apache.spark.unsafe.map.BytesToBytesMap@74cade 20/07/01 11:33:14.576 Executor task launch worker for task 3 DEBUG TaskMemoryManager: Task 3 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@74cade 20/07/01 11:33:14.590 Executor task launch worker for task 3 DEBUG TaskMemoryManager: Task 3 release 16.0 MiB from org.apache.spark.unsafe.map.BytesToBytesMap@74cade 20/07/01 11:33:14.599 Executor task launch worker for task 3 INFO Executor: Finished task 1.0 in stage 1.0 (TID 3). 2358 bytes result sent to driver 20/07/01 11:33:14.600 Executor task launch worker for task 3 DEBUG ExecutorMetricsPoller: removing (1, 0) from stageTCMP 20/07/01 11:33:14.661 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 4 20/07/01 11:33:14.662 Executor task launch worker for task 4 INFO Executor: Running task 43.0 in stage 2.0 (TID 4) 20/07/01 11:33:14.666 Executor task launch worker for task 4 DEBUG Executor: Task 4's epoch is 1 20/07/01 11:33:14.667 Executor task launch worker for task 4 INFO MapOutputTrackerWorker: Updating epoch to 1 and clearing cache 20/07/01 11:33:14.667 Executor task launch worker for task 4 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:14.668 Executor task launch worker for task 4 DEBUG BlockManager: Getting local block broadcast_2 20/07/01 11:33:14.668 Executor task launch worker for task 4 DEBUG BlockManager: Block broadcast_2 was not found 20/07/01 11:33:14.668 Executor task launch worker for task 4 INFO TorrentBroadcast: Started reading broadcast variable 2 with 1 pieces (estimated total size 4.0 MiB) 20/07/01 11:33:14.668 Executor task launch worker for task 4 DEBUG TorrentBroadcast: Reading piece broadcast_2_piece0 of broadcast_2 20/07/01 11:33:14.668 Executor task launch worker for task 4 DEBUG BlockManager: Getting local block broadcast_2_piece0 as bytes 20/07/01 11:33:14.669 Executor task launch worker for task 4 DEBUG BlockManager: Getting remote block broadcast_2_piece0 20/07/01 11:33:14.672 Executor task launch worker for task 4 DEBUG BlockManager: Getting remote block broadcast_2_piece0 from BlockManagerId(driver, amp-jenkins-worker-04.amp, 43457, None) 20/07/01 11:33:14.674 shuffle-client-4-1 DEBUG TransportClient: Sending fetch chunk request 0 to amp-jenkins-worker-04.amp/192.168.10.24:43457 20/07/01 11:33:14.675 Executor task launch worker for task 4 INFO MemoryStore: Block broadcast_2_piece0 stored as bytes in memory (estimated size 12.7 KiB, free 366.3 MiB) 20/07/01 11:33:14.678 Executor task launch worker for task 4 DEBUG BlockManagerMaster: Updated info of block broadcast_2_piece0 20/07/01 11:33:14.679 Executor task launch worker for task 4 DEBUG BlockManager: Told master about block broadcast_2_piece0 20/07/01 11:33:14.679 Executor task launch worker for task 4 DEBUG BlockManager: Put block broadcast_2_piece0 locally took 3 ms 20/07/01 11:33:14.679 Executor task launch worker for task 4 DEBUG BlockManager: Putting block broadcast_2_piece0 without replication took 4 ms 20/07/01 11:33:14.679 Executor task launch worker for task 4 INFO TorrentBroadcast: Reading broadcast variable 2 took 10 ms 20/07/01 11:33:14.682 Executor task launch worker for task 4 INFO MemoryStore: Block broadcast_2 stored as values in memory (estimated size 26.3 KiB, free 366.2 MiB) 20/07/01 11:33:14.682 Executor task launch worker for task 4 DEBUG BlockManager: Put block broadcast_2 locally took 1 ms 20/07/01 11:33:14.682 Executor task launch worker for task 4 DEBUG BlockManager: Putting block broadcast_2 without replication took 1 ms 20/07/01 11:33:14.705 Executor task launch worker for task 4 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:14.705 Executor task launch worker for task 4 INFO MapOutputTrackerWorker: Don't have map outputs for shuffle 0, fetching them 20/07/01 11:33:14.706 Executor task launch worker for task 4 INFO MapOutputTrackerWorker: Doing the fetch; tracker endpoint = NettyRpcEndpointRef(spark://MapOutputTracker@amp-jenkins-worker-04.amp:35835) 20/07/01 11:33:14.759 Executor task launch worker for task 4 INFO MapOutputTrackerWorker: Got the output locations 20/07/01 11:33:14.760 Executor task launch worker for task 4 DEBUG MapOutputTrackerWorker: Fetching map output statuses for shuffle 0 took 54 ms 20/07/01 11:33:14.761 Executor task launch worker for task 4 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 43-44 20/07/01 11:33:14.776 Executor task launch worker for task 4 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:14.784 Executor task launch worker for task 4 INFO ShuffleBlockFetcherIterator: Getting 2 (160.0 B) non-empty blocks including 2 (160.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:14.785 Executor task launch worker for task 4 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 13 ms 20/07/01 11:33:14.786 Executor task launch worker for task 4 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: (shuffle_0_2_43,0), (shuffle_0_3_43,1) 20/07/01 11:33:14.790 Executor task launch worker for task 4 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 17 ms 20/07/01 11:33:14.799 Executor task launch worker for task 4 DEBUG CodeGenerator: /* 001 */ public Object generate(Object[] references) { /* 002 */ return new GeneratedIteratorForCodegenStage2(references); /* 003 */ } /* 004 */ /* 005 */ // codegenStageId=2 /* 006 */ final class GeneratedIteratorForCodegenStage2 extends org.apache.spark.sql.execution.BufferedRowIterator { /* 007 */ private Object[] references; /* 008 */ private scala.collection.Iterator[] inputs; /* 009 */ private boolean agg_initAgg_0; /* 010 */ private org.apache.spark.unsafe.KVIterator agg_mapIter_0; /* 011 */ private org.apache.spark.sql.execution.UnsafeFixedWidthAggregationMap agg_hashMap_0; /* 012 */ private org.apache.spark.sql.execution.UnsafeKVExternalSorter agg_sorter_0; /* 013 */ private scala.collection.Iterator inputadapter_input_0; /* 014 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] agg_mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[2]; /* 015 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] agg_mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[2]; /* 016 */ /* 017 */ public GeneratedIteratorForCodegenStage2(Object[] references) { /* 018 */ this.references = references; /* 019 */ } /* 020 */ /* 021 */ public void init(int index, scala.collection.Iterator[] inputs) { /* 022 */ partitionIndex = index; /* 023 */ this.inputs = inputs; /* 024 */ /* 025 */ inputadapter_input_0 = inputs[0]; /* 026 */ agg_mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 027 */ agg_mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(agg_mutableStateArray_0[0], 4); /* 028 */ agg_mutableStateArray_0[1] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(2, 32); /* 029 */ agg_mutableStateArray_1[1] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(agg_mutableStateArray_0[1], 4); /* 030 */ /* 031 */ } /* 032 */ /* 033 */ private void agg_doAggregateWithKeysOutput_0(UnsafeRow agg_keyTerm_0, UnsafeRow agg_bufferTerm_0) /* 034 */ throws java.io.IOException { /* 035 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[4] /* numOutputRows */).add(1); /* 036 */ /* 037 */ ArrayData agg_value_5 = agg_keyTerm_0.getArray(0); /* 038 */ long agg_value_6 = agg_bufferTerm_0.getLong(0); /* 039 */ /* 040 */ agg_mutableStateArray_0[1].reset(); /* 041 */ /* 042 */ // Remember the current cursor so that we can calculate how many bytes are /* 043 */ // written later. /* 044 */ final int agg_previousCursor_1 = agg_mutableStateArray_0[1].cursor(); /* 045 */ /* 046 */ final ArrayData agg_tmpInput_1 = agg_value_5; /* 047 */ if (agg_tmpInput_1 instanceof UnsafeArrayData) { /* 048 */ agg_mutableStateArray_0[1].write((UnsafeArrayData) agg_tmpInput_1); /* 049 */ } else { /* 050 */ final int agg_numElements_1 = agg_tmpInput_1.numElements(); /* 051 */ agg_mutableStateArray_1[1].initialize(agg_numElements_1); /* 052 */ /* 053 */ for (int agg_index_1 = 0; agg_index_1 < agg_numElements_1; agg_index_1++) { /* 054 */ if (agg_tmpInput_1.isNullAt(agg_index_1)) { /* 055 */ agg_mutableStateArray_1[1].setNull4Bytes(agg_index_1); /* 056 */ } else { /* 057 */ agg_mutableStateArray_1[1].write(agg_index_1, agg_tmpInput_1.getInt(agg_index_1)); /* 058 */ } /* 059 */ /* 060 */ } /* 061 */ } /* 062 */ /* 063 */ agg_mutableStateArray_0[1].setOffsetAndSizeFromPreviousCursor(0, agg_previousCursor_1); /* 064 */ /* 065 */ agg_mutableStateArray_0[1].write(1, agg_value_6); /* 066 */ append((agg_mutableStateArray_0[1].getRow())); /* 067 */ /* 068 */ } /* 069 */ /* 070 */ private void agg_doConsume_0(InternalRow inputadapter_row_0, ArrayData agg_expr_0_0, long agg_expr_1_0) throws java.io.IOException { /* 071 */ UnsafeRow agg_unsafeRowAggBuffer_0 = null; /* 072 */ /* 073 */ // generate grouping key /* 074 */ agg_mutableStateArray_0[0].reset(); /* 075 */ /* 076 */ // Remember the current cursor so that we can calculate how many bytes are /* 077 */ // written later. /* 078 */ final int agg_previousCursor_0 = agg_mutableStateArray_0[0].cursor(); /* 079 */ /* 080 */ final ArrayData agg_tmpInput_0 = agg_expr_0_0; /* 081 */ if (agg_tmpInput_0 instanceof UnsafeArrayData) { /* 082 */ agg_mutableStateArray_0[0].write((UnsafeArrayData) agg_tmpInput_0); /* 083 */ } else { /* 084 */ final int agg_numElements_0 = agg_tmpInput_0.numElements(); /* 085 */ agg_mutableStateArray_1[0].initialize(agg_numElements_0); /* 086 */ /* 087 */ for (int agg_index_0 = 0; agg_index_0 < agg_numElements_0; agg_index_0++) { /* 088 */ if (agg_tmpInput_0.isNullAt(agg_index_0)) { /* 089 */ agg_mutableStateArray_1[0].setNull4Bytes(agg_index_0); /* 090 */ } else { /* 091 */ agg_mutableStateArray_1[0].write(agg_index_0, agg_tmpInput_0.getInt(agg_index_0)); /* 092 */ } /* 093 */ /* 094 */ } /* 095 */ } /* 096 */ /* 097 */ agg_mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, agg_previousCursor_0); /* 098 */ int agg_unsafeRowKeyHash_0 = (agg_mutableStateArray_0[0].getRow()).hashCode(); /* 099 */ if (true) { /* 100 */ // try to get the buffer from hash map /* 101 */ agg_unsafeRowAggBuffer_0 = /* 102 */ agg_hashMap_0.getAggregationBufferFromUnsafeRow((agg_mutableStateArray_0[0].getRow()), agg_unsafeRowKeyHash_0); /* 103 */ } /* 104 */ // Can't allocate buffer from the hash map. Spill the map and fallback to sort-based /* 105 */ // aggregation after processing all input rows. /* 106 */ if (agg_unsafeRowAggBuffer_0 == null) { /* 107 */ if (agg_sorter_0 == null) { /* 108 */ agg_sorter_0 = agg_hashMap_0.destructAndCreateExternalSorter(); /* 109 */ } else { /* 110 */ agg_sorter_0.merge(agg_hashMap_0.destructAndCreateExternalSorter()); /* 111 */ } /* 112 */ /* 113 */ // the hash map had be spilled, it should have enough memory now, /* 114 */ // try to allocate buffer again. /* 115 */ agg_unsafeRowAggBuffer_0 = agg_hashMap_0.getAggregationBufferFromUnsafeRow( /* 116 */ (agg_mutableStateArray_0[0].getRow()), agg_unsafeRowKeyHash_0); /* 117 */ if (agg_unsafeRowAggBuffer_0 == null) { /* 118 */ // failed to allocate the first page /* 119 */ throw new org.apache.spark.memory.SparkOutOfMemoryError("No enough memory for aggregation"); /* 120 */ } /* 121 */ } /* 122 */ /* 123 */ // common sub-expressions /* 124 */ /* 125 */ // evaluate aggregate functions and update aggregation buffers /* 126 */ /* 127 */ long agg_value_3 = agg_unsafeRowAggBuffer_0.getLong(0); /* 128 */ /* 129 */ long agg_value_2 = -1L; /* 130 */ /* 131 */ agg_value_2 = agg_value_3 + agg_expr_1_0; /* 132 */ /* 133 */ agg_unsafeRowAggBuffer_0.setLong(0, agg_value_2); /* 134 */ /* 135 */ } /* 136 */ /* 137 */ private void agg_doAggregateWithKeys_0() throws java.io.IOException { /* 138 */ while ( inputadapter_input_0.hasNext()) { /* 139 */ InternalRow inputadapter_row_0 = (InternalRow) inputadapter_input_0.next(); /* 140 */ /* 141 */ ArrayData inputadapter_value_0 = inputadapter_row_0.getArray(0); /* 142 */ long inputadapter_value_1 = inputadapter_row_0.getLong(1); /* 143 */ /* 144 */ agg_doConsume_0(inputadapter_row_0, inputadapter_value_0, inputadapter_value_1); /* 145 */ // shouldStop check is eliminated /* 146 */ } /* 147 */ /* 148 */ agg_mapIter_0 = ((org.apache.spark.sql.execution.aggregate.HashAggregateExec) references[0] /* plan */).finishAggregate(agg_hashMap_0, agg_sorter_0, ((org.apache.spark.sql.execution.metric.SQLMetric) references[1] /* peakMemory */), ((org.apache.spark.sql.execution.metric.SQLMetric) references[2] /* spillSize */), ((org.apache.spark.sql.execution.metric.SQLMetric) references[3] /* avgHashProbe */)); /* 149 */ } /* 150 */ /* 151 */ protected void processNext() throws java.io.IOException { /* 152 */ if (!agg_initAgg_0) { /* 153 */ agg_initAgg_0 = true; /* 154 */ /* 155 */ agg_hashMap_0 = ((org.apache.spark.sql.execution.aggregate.HashAggregateExec) references[0] /* plan */).createHashMap(); /* 156 */ long wholestagecodegen_beforeAgg_0 = System.nanoTime(); /* 157 */ agg_doAggregateWithKeys_0(); /* 158 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[5] /* aggTime */).add((System.nanoTime() - wholestagecodegen_beforeAgg_0) / 1000000); /* 159 */ } /* 160 */ // output the result /* 161 */ /* 162 */ while ( agg_mapIter_0.next()) { /* 163 */ UnsafeRow agg_aggKey_0 = (UnsafeRow) agg_mapIter_0.getKey(); /* 164 */ UnsafeRow agg_aggBuffer_0 = (UnsafeRow) agg_mapIter_0.getValue(); /* 165 */ agg_doAggregateWithKeysOutput_0(agg_aggKey_0, agg_aggBuffer_0); /* 166 */ if (shouldStop()) return; /* 167 */ } /* 168 */ agg_mapIter_0.close(); /* 169 */ if (agg_sorter_0 == null) { /* 170 */ agg_hashMap_0.free(); /* 171 */ } /* 172 */ } /* 173 */ /* 174 */ } 20/07/01 11:33:14.834 Executor task launch worker for task 4 INFO CodeGenerator: Code generated in 40.704993 ms 20/07/01 11:33:14.839 Executor task launch worker for task 4 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:14.843 Executor task launch worker for task 4 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:14.843 Executor task launch worker for task 4 DEBUG TaskMemoryManager: Task 4 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@3c90be38 20/07/01 11:33:14.848 Executor task launch worker for task 4 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:14.858 Executor task launch worker for task 4 DEBUG TaskMemoryManager: Task 4 acquired 16.0 MiB for org.apache.spark.unsafe.map.BytesToBytesMap@3c90be38 20/07/01 11:33:14.861 Executor task launch worker for task 4 DEBUG TaskMemoryManager: Task 4 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@3c90be38 20/07/01 11:33:14.861 Executor task launch worker for task 4 DEBUG TaskMemoryManager: Task 4 release 16.0 MiB from org.apache.spark.unsafe.map.BytesToBytesMap@3c90be38 20/07/01 11:33:14.864 Executor task launch worker for task 4 INFO Executor: Finished task 43.0 in stage 2.0 (TID 4). 3375 bytes result sent to driver 20/07/01 11:33:14.864 Executor task launch worker for task 4 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:14.869 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 5 20/07/01 11:33:14.870 Executor task launch worker for task 5 INFO Executor: Running task 49.0 in stage 2.0 (TID 5) 20/07/01 11:33:14.871 Executor task launch worker for task 5 DEBUG Executor: Task 5's epoch is 1 20/07/01 11:33:14.871 Executor task launch worker for task 5 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:14.877 Executor task launch worker for task 5 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:14.877 Executor task launch worker for task 5 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 49-50 20/07/01 11:33:14.878 Executor task launch worker for task 5 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:14.878 Executor task launch worker for task 5 INFO ShuffleBlockFetcherIterator: Getting 2 (160.0 B) non-empty blocks including 2 (160.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:14.878 Executor task launch worker for task 5 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:14.879 Executor task launch worker for task 5 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: (shuffle_0_2_49,0), (shuffle_0_3_49,1) 20/07/01 11:33:14.879 Executor task launch worker for task 5 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 1 ms 20/07/01 11:33:14.882 Executor task launch worker for task 5 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:14.885 Executor task launch worker for task 5 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:14.885 Executor task launch worker for task 5 DEBUG TaskMemoryManager: Task 5 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@3cc662ba 20/07/01 11:33:14.889 Executor task launch worker for task 5 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:14.890 Executor task launch worker for task 5 DEBUG TaskMemoryManager: Task 5 acquired 16.0 MiB for org.apache.spark.unsafe.map.BytesToBytesMap@3cc662ba 20/07/01 11:33:14.890 Executor task launch worker for task 5 DEBUG TaskMemoryManager: Task 5 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@3cc662ba 20/07/01 11:33:14.891 Executor task launch worker for task 5 DEBUG TaskMemoryManager: Task 5 release 16.0 MiB from org.apache.spark.unsafe.map.BytesToBytesMap@3cc662ba 20/07/01 11:33:14.893 Executor task launch worker for task 5 INFO Executor: Finished task 49.0 in stage 2.0 (TID 5). 3379 bytes result sent to driver 20/07/01 11:33:14.893 Executor task launch worker for task 5 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:14.898 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 6 20/07/01 11:33:14.899 Executor task launch worker for task 6 INFO Executor: Running task 51.0 in stage 2.0 (TID 6) 20/07/01 11:33:14.900 Executor task launch worker for task 6 DEBUG Executor: Task 6's epoch is 1 20/07/01 11:33:14.900 Executor task launch worker for task 6 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:14.906 Executor task launch worker for task 6 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:14.906 Executor task launch worker for task 6 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 51-52 20/07/01 11:33:14.907 Executor task launch worker for task 6 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:14.908 Executor task launch worker for task 6 INFO ShuffleBlockFetcherIterator: Getting 2 (160.0 B) non-empty blocks including 2 (160.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:14.908 Executor task launch worker for task 6 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 1 ms 20/07/01 11:33:14.908 Executor task launch worker for task 6 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: (shuffle_0_2_51,0), (shuffle_0_3_51,1) 20/07/01 11:33:14.909 Executor task launch worker for task 6 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 1 ms 20/07/01 11:33:14.911 Executor task launch worker for task 6 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:14.915 Executor task launch worker for task 6 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:14.915 Executor task launch worker for task 6 DEBUG TaskMemoryManager: Task 6 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@22961c9a 20/07/01 11:33:14.918 Executor task launch worker for task 6 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:14.919 Executor task launch worker for task 6 DEBUG TaskMemoryManager: Task 6 acquired 16.0 MiB for org.apache.spark.unsafe.map.BytesToBytesMap@22961c9a 20/07/01 11:33:14.919 Executor task launch worker for task 6 DEBUG TaskMemoryManager: Task 6 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@22961c9a 20/07/01 11:33:14.919 Executor task launch worker for task 6 DEBUG TaskMemoryManager: Task 6 release 16.0 MiB from org.apache.spark.unsafe.map.BytesToBytesMap@22961c9a 20/07/01 11:33:14.921 Executor task launch worker for task 6 INFO Executor: Finished task 51.0 in stage 2.0 (TID 6). 3379 bytes result sent to driver 20/07/01 11:33:14.922 Executor task launch worker for task 6 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:14.927 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 7 20/07/01 11:33:14.927 Executor task launch worker for task 7 INFO Executor: Running task 66.0 in stage 2.0 (TID 7) 20/07/01 11:33:14.928 Executor task launch worker for task 7 DEBUG Executor: Task 7's epoch is 1 20/07/01 11:33:14.928 Executor task launch worker for task 7 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:14.933 Executor task launch worker for task 7 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:14.933 Executor task launch worker for task 7 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 66-67 20/07/01 11:33:14.934 Executor task launch worker for task 7 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:14.934 Executor task launch worker for task 7 INFO ShuffleBlockFetcherIterator: Getting 2 (160.0 B) non-empty blocks including 2 (160.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:14.934 Executor task launch worker for task 7 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:14.934 Executor task launch worker for task 7 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: (shuffle_0_2_66,0), (shuffle_0_3_66,1) 20/07/01 11:33:14.935 Executor task launch worker for task 7 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 1 ms 20/07/01 11:33:14.938 Executor task launch worker for task 7 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:14.941 Executor task launch worker for task 7 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:14.941 Executor task launch worker for task 7 DEBUG TaskMemoryManager: Task 7 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@65e803fb 20/07/01 11:33:14.944 Executor task launch worker for task 7 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:14.944 Executor task launch worker for task 7 DEBUG TaskMemoryManager: Task 7 acquired 16.0 MiB for org.apache.spark.unsafe.map.BytesToBytesMap@65e803fb 20/07/01 11:33:14.945 Executor task launch worker for task 7 DEBUG TaskMemoryManager: Task 7 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@65e803fb 20/07/01 11:33:14.945 Executor task launch worker for task 7 DEBUG TaskMemoryManager: Task 7 release 16.0 MiB from org.apache.spark.unsafe.map.BytesToBytesMap@65e803fb 20/07/01 11:33:14.946 Executor task launch worker for task 7 INFO Executor: Finished task 66.0 in stage 2.0 (TID 7). 3379 bytes result sent to driver 20/07/01 11:33:14.947 Executor task launch worker for task 7 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:14.952 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 8 20/07/01 11:33:14.953 Executor task launch worker for task 8 INFO Executor: Running task 89.0 in stage 2.0 (TID 8) 20/07/01 11:33:14.954 Executor task launch worker for task 8 DEBUG Executor: Task 8's epoch is 1 20/07/01 11:33:14.954 Executor task launch worker for task 8 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:14.958 Executor task launch worker for task 8 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:14.958 Executor task launch worker for task 8 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 89-90 20/07/01 11:33:14.958 Executor task launch worker for task 8 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:14.959 Executor task launch worker for task 8 INFO ShuffleBlockFetcherIterator: Getting 2 (160.0 B) non-empty blocks including 2 (160.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:14.959 Executor task launch worker for task 8 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:14.959 Executor task launch worker for task 8 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: (shuffle_0_2_89,0), (shuffle_0_3_89,1) 20/07/01 11:33:14.959 Executor task launch worker for task 8 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 1 ms 20/07/01 11:33:14.962 Executor task launch worker for task 8 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:14.966 Executor task launch worker for task 8 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:14.966 Executor task launch worker for task 8 DEBUG TaskMemoryManager: Task 8 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@800df47 20/07/01 11:33:14.969 Executor task launch worker for task 8 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:14.969 Executor task launch worker for task 8 DEBUG TaskMemoryManager: Task 8 acquired 16.0 MiB for org.apache.spark.unsafe.map.BytesToBytesMap@800df47 20/07/01 11:33:14.969 Executor task launch worker for task 8 DEBUG TaskMemoryManager: Task 8 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@800df47 20/07/01 11:33:14.970 Executor task launch worker for task 8 DEBUG TaskMemoryManager: Task 8 release 16.0 MiB from org.apache.spark.unsafe.map.BytesToBytesMap@800df47 20/07/01 11:33:14.971 Executor task launch worker for task 8 INFO Executor: Finished task 89.0 in stage 2.0 (TID 8). 3379 bytes result sent to driver 20/07/01 11:33:14.972 Executor task launch worker for task 8 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:14.977 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 9 20/07/01 11:33:14.978 Executor task launch worker for task 9 INFO Executor: Running task 102.0 in stage 2.0 (TID 9) 20/07/01 11:33:14.979 Executor task launch worker for task 9 DEBUG Executor: Task 9's epoch is 1 20/07/01 11:33:14.979 Executor task launch worker for task 9 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:14.983 Executor task launch worker for task 9 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:14.983 Executor task launch worker for task 9 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 102-103 20/07/01 11:33:14.983 Executor task launch worker for task 9 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:14.984 Executor task launch worker for task 9 INFO ShuffleBlockFetcherIterator: Getting 2 (160.0 B) non-empty blocks including 2 (160.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:14.984 Executor task launch worker for task 9 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:14.984 Executor task launch worker for task 9 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: (shuffle_0_2_102,0), (shuffle_0_3_102,1) 20/07/01 11:33:14.984 Executor task launch worker for task 9 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 1 ms 20/07/01 11:33:14.988 Executor task launch worker for task 9 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:14.992 Executor task launch worker for task 9 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:14.992 Executor task launch worker for task 9 DEBUG TaskMemoryManager: Task 9 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@3d07560e 20/07/01 11:33:14.996 Executor task launch worker for task 9 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:14.996 Executor task launch worker for task 9 DEBUG TaskMemoryManager: Task 9 acquired 16.0 MiB for org.apache.spark.unsafe.map.BytesToBytesMap@3d07560e 20/07/01 11:33:14.997 Executor task launch worker for task 9 DEBUG TaskMemoryManager: Task 9 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@3d07560e 20/07/01 11:33:14.997 Executor task launch worker for task 9 DEBUG TaskMemoryManager: Task 9 release 16.0 MiB from org.apache.spark.unsafe.map.BytesToBytesMap@3d07560e 20/07/01 11:33:14.999 Executor task launch worker for task 9 INFO Executor: Finished task 102.0 in stage 2.0 (TID 9). 3379 bytes result sent to driver 20/07/01 11:33:14.999 Executor task launch worker for task 9 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.004 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 10 20/07/01 11:33:15.005 Executor task launch worker for task 10 INFO Executor: Running task 103.0 in stage 2.0 (TID 10) 20/07/01 11:33:15.006 Executor task launch worker for task 10 DEBUG Executor: Task 10's epoch is 1 20/07/01 11:33:15.006 Executor task launch worker for task 10 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.011 Executor task launch worker for task 10 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.011 Executor task launch worker for task 10 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 103-104 20/07/01 11:33:15.012 Executor task launch worker for task 10 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.013 Executor task launch worker for task 10 INFO ShuffleBlockFetcherIterator: Getting 2 (160.0 B) non-empty blocks including 2 (160.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.013 Executor task launch worker for task 10 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 1 ms 20/07/01 11:33:15.013 Executor task launch worker for task 10 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: (shuffle_0_2_103,0), (shuffle_0_3_103,1) 20/07/01 11:33:15.014 Executor task launch worker for task 10 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 1 ms 20/07/01 11:33:15.017 Executor task launch worker for task 10 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.021 Executor task launch worker for task 10 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.022 Executor task launch worker for task 10 DEBUG TaskMemoryManager: Task 10 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@337f11af 20/07/01 11:33:15.025 Executor task launch worker for task 10 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.026 Executor task launch worker for task 10 DEBUG TaskMemoryManager: Task 10 acquired 16.0 MiB for org.apache.spark.unsafe.map.BytesToBytesMap@337f11af 20/07/01 11:33:15.026 Executor task launch worker for task 10 DEBUG TaskMemoryManager: Task 10 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@337f11af 20/07/01 11:33:15.026 Executor task launch worker for task 10 DEBUG TaskMemoryManager: Task 10 release 16.0 MiB from org.apache.spark.unsafe.map.BytesToBytesMap@337f11af 20/07/01 11:33:15.028 Executor task launch worker for task 10 INFO Executor: Finished task 103.0 in stage 2.0 (TID 10). 3379 bytes result sent to driver 20/07/01 11:33:15.028 Executor task launch worker for task 10 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.034 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 11 20/07/01 11:33:15.034 Executor task launch worker for task 11 INFO Executor: Running task 107.0 in stage 2.0 (TID 11) 20/07/01 11:33:15.035 Executor task launch worker for task 11 DEBUG Executor: Task 11's epoch is 1 20/07/01 11:33:15.035 Executor task launch worker for task 11 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.041 Executor task launch worker for task 11 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.041 Executor task launch worker for task 11 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 107-108 20/07/01 11:33:15.042 Executor task launch worker for task 11 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.043 Executor task launch worker for task 11 INFO ShuffleBlockFetcherIterator: Getting 2 (160.0 B) non-empty blocks including 2 (160.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.043 Executor task launch worker for task 11 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 1 ms 20/07/01 11:33:15.043 Executor task launch worker for task 11 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: (shuffle_0_2_107,0), (shuffle_0_3_107,1) 20/07/01 11:33:15.043 Executor task launch worker for task 11 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 1 ms 20/07/01 11:33:15.048 Executor task launch worker for task 11 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.053 Executor task launch worker for task 11 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.054 Executor task launch worker for task 11 DEBUG TaskMemoryManager: Task 11 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@3bcefbf4 20/07/01 11:33:15.058 Executor task launch worker for task 11 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.059 Executor task launch worker for task 11 DEBUG TaskMemoryManager: Task 11 acquired 16.0 MiB for org.apache.spark.unsafe.map.BytesToBytesMap@3bcefbf4 20/07/01 11:33:15.060 Executor task launch worker for task 11 DEBUG TaskMemoryManager: Task 11 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@3bcefbf4 20/07/01 11:33:15.060 Executor task launch worker for task 11 DEBUG TaskMemoryManager: Task 11 release 16.0 MiB from org.apache.spark.unsafe.map.BytesToBytesMap@3bcefbf4 20/07/01 11:33:15.062 Executor task launch worker for task 11 INFO Executor: Finished task 107.0 in stage 2.0 (TID 11). 3379 bytes result sent to driver 20/07/01 11:33:15.063 Executor task launch worker for task 11 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.067 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 12 20/07/01 11:33:15.068 Executor task launch worker for task 12 INFO Executor: Running task 174.0 in stage 2.0 (TID 12) 20/07/01 11:33:15.069 Executor task launch worker for task 12 DEBUG Executor: Task 12's epoch is 1 20/07/01 11:33:15.069 Executor task launch worker for task 12 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.074 Executor task launch worker for task 12 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.074 Executor task launch worker for task 12 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 174-175 20/07/01 11:33:15.075 Executor task launch worker for task 12 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.075 Executor task launch worker for task 12 INFO ShuffleBlockFetcherIterator: Getting 2 (160.0 B) non-empty blocks including 2 (160.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.076 Executor task launch worker for task 12 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.076 Executor task launch worker for task 12 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: (shuffle_0_2_174,0), (shuffle_0_3_174,1) 20/07/01 11:33:15.076 Executor task launch worker for task 12 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 1 ms 20/07/01 11:33:15.079 Executor task launch worker for task 12 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.083 Executor task launch worker for task 12 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.084 Executor task launch worker for task 12 DEBUG TaskMemoryManager: Task 12 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@5d797a98 20/07/01 11:33:15.087 Executor task launch worker for task 12 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.088 Executor task launch worker for task 12 DEBUG TaskMemoryManager: Task 12 acquired 16.0 MiB for org.apache.spark.unsafe.map.BytesToBytesMap@5d797a98 20/07/01 11:33:15.088 Executor task launch worker for task 12 DEBUG TaskMemoryManager: Task 12 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@5d797a98 20/07/01 11:33:15.088 Executor task launch worker for task 12 DEBUG TaskMemoryManager: Task 12 release 16.0 MiB from org.apache.spark.unsafe.map.BytesToBytesMap@5d797a98 20/07/01 11:33:15.090 Executor task launch worker for task 12 INFO Executor: Finished task 174.0 in stage 2.0 (TID 12). 3379 bytes result sent to driver 20/07/01 11:33:15.091 Executor task launch worker for task 12 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.096 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 13 20/07/01 11:33:15.096 Executor task launch worker for task 13 INFO Executor: Running task 191.0 in stage 2.0 (TID 13) 20/07/01 11:33:15.097 Executor task launch worker for task 13 DEBUG Executor: Task 13's epoch is 1 20/07/01 11:33:15.097 Executor task launch worker for task 13 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.103 Executor task launch worker for task 13 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.103 Executor task launch worker for task 13 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 191-192 20/07/01 11:33:15.104 Executor task launch worker for task 13 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.105 Executor task launch worker for task 13 INFO ShuffleBlockFetcherIterator: Getting 2 (144.0 B) non-empty blocks including 2 (144.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.105 Executor task launch worker for task 13 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.105 Executor task launch worker for task 13 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: (shuffle_0_2_191,0), (shuffle_0_3_191,1) 20/07/01 11:33:15.106 Executor task launch worker for task 13 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 1 ms 20/07/01 11:33:15.110 Executor task launch worker for task 13 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.115 Executor task launch worker for task 13 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.115 Executor task launch worker for task 13 DEBUG TaskMemoryManager: Task 13 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@2f59eb0e 20/07/01 11:33:15.118 Executor task launch worker for task 13 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.119 Executor task launch worker for task 13 DEBUG TaskMemoryManager: Task 13 acquired 16.0 MiB for org.apache.spark.unsafe.map.BytesToBytesMap@2f59eb0e 20/07/01 11:33:15.119 Executor task launch worker for task 13 DEBUG TaskMemoryManager: Task 13 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@2f59eb0e 20/07/01 11:33:15.119 Executor task launch worker for task 13 DEBUG TaskMemoryManager: Task 13 release 16.0 MiB from org.apache.spark.unsafe.map.BytesToBytesMap@2f59eb0e 20/07/01 11:33:15.121 Executor task launch worker for task 13 INFO Executor: Finished task 191.0 in stage 2.0 (TID 13). 3372 bytes result sent to driver 20/07/01 11:33:15.122 Executor task launch worker for task 13 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.128 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 14 20/07/01 11:33:15.128 Executor task launch worker for task 14 INFO Executor: Running task 0.0 in stage 2.0 (TID 14) 20/07/01 11:33:15.129 Executor task launch worker for task 14 DEBUG Executor: Task 14's epoch is 1 20/07/01 11:33:15.129 Executor task launch worker for task 14 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.133 Executor task launch worker for task 14 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.133 Executor task launch worker for task 14 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 0-1 20/07/01 11:33:15.133 Executor task launch worker for task 14 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.134 Executor task launch worker for task 14 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.134 Executor task launch worker for task 14 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.134 Executor task launch worker for task 14 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.134 Executor task launch worker for task 14 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.137 Executor task launch worker for task 14 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.139 Executor task launch worker for task 14 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.140 Executor task launch worker for task 14 DEBUG TaskMemoryManager: Task 14 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@76f55d40 20/07/01 11:33:15.142 Executor task launch worker for task 14 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.142 Executor task launch worker for task 14 DEBUG TaskMemoryManager: Task 14 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@76f55d40 20/07/01 11:33:15.144 Executor task launch worker for task 14 INFO Executor: Finished task 0.0 in stage 2.0 (TID 14). 3346 bytes result sent to driver 20/07/01 11:33:15.144 Executor task launch worker for task 14 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.150 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 15 20/07/01 11:33:15.151 Executor task launch worker for task 15 INFO Executor: Running task 1.0 in stage 2.0 (TID 15) 20/07/01 11:33:15.151 Executor task launch worker for task 15 DEBUG Executor: Task 15's epoch is 1 20/07/01 11:33:15.151 Executor task launch worker for task 15 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.156 Executor task launch worker for task 15 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.156 Executor task launch worker for task 15 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 1-2 20/07/01 11:33:15.156 Executor task launch worker for task 15 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.156 Executor task launch worker for task 15 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.157 Executor task launch worker for task 15 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.157 Executor task launch worker for task 15 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.157 Executor task launch worker for task 15 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.159 Executor task launch worker for task 15 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.162 Executor task launch worker for task 15 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.163 Executor task launch worker for task 15 DEBUG TaskMemoryManager: Task 15 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@546c6ab7 20/07/01 11:33:15.165 Executor task launch worker for task 15 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.165 Executor task launch worker for task 15 DEBUG TaskMemoryManager: Task 15 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@546c6ab7 20/07/01 11:33:15.166 Executor task launch worker for task 15 INFO Executor: Finished task 1.0 in stage 2.0 (TID 15). 3346 bytes result sent to driver 20/07/01 11:33:15.167 Executor task launch worker for task 15 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.171 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 16 20/07/01 11:33:15.172 Executor task launch worker for task 16 INFO Executor: Running task 2.0 in stage 2.0 (TID 16) 20/07/01 11:33:15.172 Executor task launch worker for task 16 DEBUG Executor: Task 16's epoch is 1 20/07/01 11:33:15.172 Executor task launch worker for task 16 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.176 Executor task launch worker for task 16 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.176 Executor task launch worker for task 16 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 2-3 20/07/01 11:33:15.176 Executor task launch worker for task 16 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.177 Executor task launch worker for task 16 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.177 Executor task launch worker for task 16 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.177 Executor task launch worker for task 16 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.177 Executor task launch worker for task 16 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.180 Executor task launch worker for task 16 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.183 Executor task launch worker for task 16 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.183 Executor task launch worker for task 16 DEBUG TaskMemoryManager: Task 16 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@65854257 20/07/01 11:33:15.185 Executor task launch worker for task 16 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.186 Executor task launch worker for task 16 DEBUG TaskMemoryManager: Task 16 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@65854257 20/07/01 11:33:15.187 Executor task launch worker for task 16 INFO Executor: Finished task 2.0 in stage 2.0 (TID 16). 3346 bytes result sent to driver 20/07/01 11:33:15.187 Executor task launch worker for task 16 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.191 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 17 20/07/01 11:33:15.192 Executor task launch worker for task 17 INFO Executor: Running task 3.0 in stage 2.0 (TID 17) 20/07/01 11:33:15.193 Executor task launch worker for task 17 DEBUG Executor: Task 17's epoch is 1 20/07/01 11:33:15.193 Executor task launch worker for task 17 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.197 Executor task launch worker for task 17 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.197 Executor task launch worker for task 17 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 3-4 20/07/01 11:33:15.198 Executor task launch worker for task 17 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.198 Executor task launch worker for task 17 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.198 Executor task launch worker for task 17 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.198 Executor task launch worker for task 17 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.198 Executor task launch worker for task 17 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.201 Executor task launch worker for task 17 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.205 Executor task launch worker for task 17 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.205 Executor task launch worker for task 17 DEBUG TaskMemoryManager: Task 17 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@4dd1906 20/07/01 11:33:15.208 Executor task launch worker for task 17 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.209 Executor task launch worker for task 17 DEBUG TaskMemoryManager: Task 17 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@4dd1906 20/07/01 11:33:15.210 Executor task launch worker for task 17 INFO Executor: Finished task 3.0 in stage 2.0 (TID 17). 3346 bytes result sent to driver 20/07/01 11:33:15.210 Executor task launch worker for task 17 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.215 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 18 20/07/01 11:33:15.216 Executor task launch worker for task 18 INFO Executor: Running task 4.0 in stage 2.0 (TID 18) 20/07/01 11:33:15.217 Executor task launch worker for task 18 DEBUG Executor: Task 18's epoch is 1 20/07/01 11:33:15.217 Executor task launch worker for task 18 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.220 Executor task launch worker for task 18 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.220 Executor task launch worker for task 18 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 4-5 20/07/01 11:33:15.221 Executor task launch worker for task 18 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.221 Executor task launch worker for task 18 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.221 Executor task launch worker for task 18 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.221 Executor task launch worker for task 18 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.221 Executor task launch worker for task 18 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.224 Executor task launch worker for task 18 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.227 Executor task launch worker for task 18 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.228 Executor task launch worker for task 18 DEBUG TaskMemoryManager: Task 18 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@87888f3 20/07/01 11:33:15.230 Executor task launch worker for task 18 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.230 Executor task launch worker for task 18 DEBUG TaskMemoryManager: Task 18 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@87888f3 20/07/01 11:33:15.231 Executor task launch worker for task 18 INFO Executor: Finished task 4.0 in stage 2.0 (TID 18). 3346 bytes result sent to driver 20/07/01 11:33:15.232 Executor task launch worker for task 18 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.237 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 19 20/07/01 11:33:15.237 Executor task launch worker for task 19 INFO Executor: Running task 5.0 in stage 2.0 (TID 19) 20/07/01 11:33:15.238 Executor task launch worker for task 19 DEBUG Executor: Task 19's epoch is 1 20/07/01 11:33:15.238 Executor task launch worker for task 19 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.243 Executor task launch worker for task 19 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.244 Executor task launch worker for task 19 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 5-6 20/07/01 11:33:15.244 Executor task launch worker for task 19 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.245 Executor task launch worker for task 19 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.245 Executor task launch worker for task 19 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.245 Executor task launch worker for task 19 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.245 Executor task launch worker for task 19 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.248 Executor task launch worker for task 19 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.253 Executor task launch worker for task 19 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.254 Executor task launch worker for task 19 DEBUG TaskMemoryManager: Task 19 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@38c0a88c 20/07/01 11:33:15.257 Executor task launch worker for task 19 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.257 Executor task launch worker for task 19 DEBUG TaskMemoryManager: Task 19 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@38c0a88c 20/07/01 11:33:15.259 Executor task launch worker for task 19 INFO Executor: Finished task 5.0 in stage 2.0 (TID 19). 3346 bytes result sent to driver 20/07/01 11:33:15.259 Executor task launch worker for task 19 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.264 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 20 20/07/01 11:33:15.264 Executor task launch worker for task 20 INFO Executor: Running task 6.0 in stage 2.0 (TID 20) 20/07/01 11:33:15.265 Executor task launch worker for task 20 DEBUG Executor: Task 20's epoch is 1 20/07/01 11:33:15.265 Executor task launch worker for task 20 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.269 Executor task launch worker for task 20 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.269 Executor task launch worker for task 20 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 6-7 20/07/01 11:33:15.270 Executor task launch worker for task 20 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.271 Executor task launch worker for task 20 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.271 Executor task launch worker for task 20 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.271 Executor task launch worker for task 20 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.271 Executor task launch worker for task 20 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.274 Executor task launch worker for task 20 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.278 Executor task launch worker for task 20 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.279 Executor task launch worker for task 20 DEBUG TaskMemoryManager: Task 20 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@344cfbbe 20/07/01 11:33:15.281 Executor task launch worker for task 20 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.282 Executor task launch worker for task 20 DEBUG TaskMemoryManager: Task 20 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@344cfbbe 20/07/01 11:33:15.283 Executor task launch worker for task 20 INFO Executor: Finished task 6.0 in stage 2.0 (TID 20). 3346 bytes result sent to driver 20/07/01 11:33:15.284 Executor task launch worker for task 20 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.289 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 21 20/07/01 11:33:15.290 Executor task launch worker for task 21 INFO Executor: Running task 7.0 in stage 2.0 (TID 21) 20/07/01 11:33:15.291 Executor task launch worker for task 21 DEBUG Executor: Task 21's epoch is 1 20/07/01 11:33:15.291 Executor task launch worker for task 21 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.296 Executor task launch worker for task 21 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.296 Executor task launch worker for task 21 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 7-8 20/07/01 11:33:15.297 Executor task launch worker for task 21 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.297 Executor task launch worker for task 21 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.297 Executor task launch worker for task 21 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.297 Executor task launch worker for task 21 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.297 Executor task launch worker for task 21 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.301 Executor task launch worker for task 21 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.305 Executor task launch worker for task 21 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.305 Executor task launch worker for task 21 DEBUG TaskMemoryManager: Task 21 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@12fd91f3 20/07/01 11:33:15.308 Executor task launch worker for task 21 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.309 Executor task launch worker for task 21 DEBUG TaskMemoryManager: Task 21 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@12fd91f3 20/07/01 11:33:15.311 Executor task launch worker for task 21 INFO Executor: Finished task 7.0 in stage 2.0 (TID 21). 3346 bytes result sent to driver 20/07/01 11:33:15.311 Executor task launch worker for task 21 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.317 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 22 20/07/01 11:33:15.317 Executor task launch worker for task 22 INFO Executor: Running task 8.0 in stage 2.0 (TID 22) 20/07/01 11:33:15.318 Executor task launch worker for task 22 DEBUG Executor: Task 22's epoch is 1 20/07/01 11:33:15.318 Executor task launch worker for task 22 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.322 Executor task launch worker for task 22 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.322 Executor task launch worker for task 22 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 8-9 20/07/01 11:33:15.322 Executor task launch worker for task 22 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.323 Executor task launch worker for task 22 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.323 Executor task launch worker for task 22 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.323 Executor task launch worker for task 22 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.323 Executor task launch worker for task 22 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.326 Executor task launch worker for task 22 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.329 Executor task launch worker for task 22 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.329 Executor task launch worker for task 22 DEBUG TaskMemoryManager: Task 22 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@6d1336fa 20/07/01 11:33:15.332 Executor task launch worker for task 22 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.333 Executor task launch worker for task 22 DEBUG TaskMemoryManager: Task 22 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@6d1336fa 20/07/01 11:33:15.334 Executor task launch worker for task 22 INFO Executor: Finished task 8.0 in stage 2.0 (TID 22). 3346 bytes result sent to driver 20/07/01 11:33:15.335 Executor task launch worker for task 22 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.340 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 23 20/07/01 11:33:15.340 Executor task launch worker for task 23 INFO Executor: Running task 9.0 in stage 2.0 (TID 23) 20/07/01 11:33:15.341 Executor task launch worker for task 23 DEBUG Executor: Task 23's epoch is 1 20/07/01 11:33:15.341 Executor task launch worker for task 23 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.345 Executor task launch worker for task 23 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.345 Executor task launch worker for task 23 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 9-10 20/07/01 11:33:15.346 Executor task launch worker for task 23 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.346 Executor task launch worker for task 23 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.346 Executor task launch worker for task 23 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.346 Executor task launch worker for task 23 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.346 Executor task launch worker for task 23 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.349 Executor task launch worker for task 23 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.353 Executor task launch worker for task 23 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.353 Executor task launch worker for task 23 DEBUG TaskMemoryManager: Task 23 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@41689c25 20/07/01 11:33:15.356 Executor task launch worker for task 23 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.357 Executor task launch worker for task 23 DEBUG TaskMemoryManager: Task 23 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@41689c25 20/07/01 11:33:15.358 Executor task launch worker for task 23 INFO Executor: Finished task 9.0 in stage 2.0 (TID 23). 3346 bytes result sent to driver 20/07/01 11:33:15.359 Executor task launch worker for task 23 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.363 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 24 20/07/01 11:33:15.364 Executor task launch worker for task 24 INFO Executor: Running task 10.0 in stage 2.0 (TID 24) 20/07/01 11:33:15.365 Executor task launch worker for task 24 DEBUG Executor: Task 24's epoch is 1 20/07/01 11:33:15.365 Executor task launch worker for task 24 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.369 Executor task launch worker for task 24 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.369 Executor task launch worker for task 24 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 10-11 20/07/01 11:33:15.369 Executor task launch worker for task 24 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.370 Executor task launch worker for task 24 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.370 Executor task launch worker for task 24 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.370 Executor task launch worker for task 24 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.370 Executor task launch worker for task 24 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.372 Executor task launch worker for task 24 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.375 Executor task launch worker for task 24 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.376 Executor task launch worker for task 24 DEBUG TaskMemoryManager: Task 24 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@29dd1fe9 20/07/01 11:33:15.378 Executor task launch worker for task 24 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.378 Executor task launch worker for task 24 DEBUG TaskMemoryManager: Task 24 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@29dd1fe9 20/07/01 11:33:15.380 Executor task launch worker for task 24 INFO Executor: Finished task 10.0 in stage 2.0 (TID 24). 3346 bytes result sent to driver 20/07/01 11:33:15.381 Executor task launch worker for task 24 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.387 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 25 20/07/01 11:33:15.388 Executor task launch worker for task 25 INFO Executor: Running task 11.0 in stage 2.0 (TID 25) 20/07/01 11:33:15.389 Executor task launch worker for task 25 DEBUG Executor: Task 25's epoch is 1 20/07/01 11:33:15.389 Executor task launch worker for task 25 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.394 Executor task launch worker for task 25 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.394 Executor task launch worker for task 25 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 11-12 20/07/01 11:33:15.394 Executor task launch worker for task 25 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.395 Executor task launch worker for task 25 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.395 Executor task launch worker for task 25 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.395 Executor task launch worker for task 25 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.395 Executor task launch worker for task 25 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.398 Executor task launch worker for task 25 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.403 Executor task launch worker for task 25 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.403 Executor task launch worker for task 25 DEBUG TaskMemoryManager: Task 25 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@57a514ed 20/07/01 11:33:15.406 Executor task launch worker for task 25 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.407 Executor task launch worker for task 25 DEBUG TaskMemoryManager: Task 25 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@57a514ed 20/07/01 11:33:15.408 Executor task launch worker for task 25 INFO Executor: Finished task 11.0 in stage 2.0 (TID 25). 3346 bytes result sent to driver 20/07/01 11:33:15.409 Executor task launch worker for task 25 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.416 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 26 20/07/01 11:33:15.417 Executor task launch worker for task 26 INFO Executor: Running task 12.0 in stage 2.0 (TID 26) 20/07/01 11:33:15.418 Executor task launch worker for task 26 DEBUG Executor: Task 26's epoch is 1 20/07/01 11:33:15.418 Executor task launch worker for task 26 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.422 Executor task launch worker for task 26 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.422 Executor task launch worker for task 26 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 12-13 20/07/01 11:33:15.423 Executor task launch worker for task 26 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.424 Executor task launch worker for task 26 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.424 Executor task launch worker for task 26 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.424 Executor task launch worker for task 26 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.424 Executor task launch worker for task 26 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.427 Executor task launch worker for task 26 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.431 Executor task launch worker for task 26 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.432 Executor task launch worker for task 26 DEBUG TaskMemoryManager: Task 26 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@321e4e56 20/07/01 11:33:15.435 Executor task launch worker for task 26 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.435 Executor task launch worker for task 26 DEBUG TaskMemoryManager: Task 26 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@321e4e56 20/07/01 11:33:15.437 Executor task launch worker for task 26 INFO Executor: Finished task 12.0 in stage 2.0 (TID 26). 3346 bytes result sent to driver 20/07/01 11:33:15.437 Executor task launch worker for task 26 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.442 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 27 20/07/01 11:33:15.442 Executor task launch worker for task 27 INFO Executor: Running task 13.0 in stage 2.0 (TID 27) 20/07/01 11:33:15.444 Executor task launch worker for task 27 DEBUG Executor: Task 27's epoch is 1 20/07/01 11:33:15.444 Executor task launch worker for task 27 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.448 Executor task launch worker for task 27 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.448 Executor task launch worker for task 27 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 13-14 20/07/01 11:33:15.449 Executor task launch worker for task 27 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.449 Executor task launch worker for task 27 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.449 Executor task launch worker for task 27 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.450 Executor task launch worker for task 27 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.450 Executor task launch worker for task 27 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.453 Executor task launch worker for task 27 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.457 Executor task launch worker for task 27 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.457 Executor task launch worker for task 27 DEBUG TaskMemoryManager: Task 27 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@517a1e7c 20/07/01 11:33:15.460 Executor task launch worker for task 27 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.461 Executor task launch worker for task 27 DEBUG TaskMemoryManager: Task 27 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@517a1e7c 20/07/01 11:33:15.463 Executor task launch worker for task 27 INFO Executor: Finished task 13.0 in stage 2.0 (TID 27). 3346 bytes result sent to driver 20/07/01 11:33:15.463 Executor task launch worker for task 27 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.468 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 28 20/07/01 11:33:15.469 Executor task launch worker for task 28 INFO Executor: Running task 14.0 in stage 2.0 (TID 28) 20/07/01 11:33:15.469 Executor task launch worker for task 28 DEBUG Executor: Task 28's epoch is 1 20/07/01 11:33:15.470 Executor task launch worker for task 28 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.473 Executor task launch worker for task 28 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.473 Executor task launch worker for task 28 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 14-15 20/07/01 11:33:15.474 Executor task launch worker for task 28 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.474 Executor task launch worker for task 28 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.475 Executor task launch worker for task 28 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.475 Executor task launch worker for task 28 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.475 Executor task launch worker for task 28 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.478 Executor task launch worker for task 28 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.482 Executor task launch worker for task 28 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.482 Executor task launch worker for task 28 DEBUG TaskMemoryManager: Task 28 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@2c06b5e8 20/07/01 11:33:15.485 Executor task launch worker for task 28 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.486 Executor task launch worker for task 28 DEBUG TaskMemoryManager: Task 28 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@2c06b5e8 20/07/01 11:33:15.487 Executor task launch worker for task 28 INFO Executor: Finished task 14.0 in stage 2.0 (TID 28). 3346 bytes result sent to driver 20/07/01 11:33:15.488 Executor task launch worker for task 28 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.494 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 29 20/07/01 11:33:15.494 Executor task launch worker for task 29 INFO Executor: Running task 15.0 in stage 2.0 (TID 29) 20/07/01 11:33:15.495 Executor task launch worker for task 29 DEBUG Executor: Task 29's epoch is 1 20/07/01 11:33:15.495 Executor task launch worker for task 29 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.499 Executor task launch worker for task 29 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.499 Executor task launch worker for task 29 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 15-16 20/07/01 11:33:15.500 Executor task launch worker for task 29 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.501 Executor task launch worker for task 29 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.501 Executor task launch worker for task 29 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.501 Executor task launch worker for task 29 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.501 Executor task launch worker for task 29 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.504 Executor task launch worker for task 29 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.507 Executor task launch worker for task 29 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.508 Executor task launch worker for task 29 DEBUG TaskMemoryManager: Task 29 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@2c887368 20/07/01 11:33:15.511 Executor task launch worker for task 29 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.512 Executor task launch worker for task 29 DEBUG TaskMemoryManager: Task 29 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@2c887368 20/07/01 11:33:15.513 Executor task launch worker for task 29 INFO Executor: Finished task 15.0 in stage 2.0 (TID 29). 3346 bytes result sent to driver 20/07/01 11:33:15.514 Executor task launch worker for task 29 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.520 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 30 20/07/01 11:33:15.521 Executor task launch worker for task 30 INFO Executor: Running task 16.0 in stage 2.0 (TID 30) 20/07/01 11:33:15.522 Executor task launch worker for task 30 DEBUG Executor: Task 30's epoch is 1 20/07/01 11:33:15.522 Executor task launch worker for task 30 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.529 Executor task launch worker for task 30 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.529 Executor task launch worker for task 30 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 16-17 20/07/01 11:33:15.530 Executor task launch worker for task 30 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.530 Executor task launch worker for task 30 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.530 Executor task launch worker for task 30 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.530 Executor task launch worker for task 30 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.530 Executor task launch worker for task 30 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.533 Executor task launch worker for task 30 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.537 Executor task launch worker for task 30 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.537 Executor task launch worker for task 30 DEBUG TaskMemoryManager: Task 30 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@393dc09c 20/07/01 11:33:15.540 Executor task launch worker for task 30 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.541 Executor task launch worker for task 30 DEBUG TaskMemoryManager: Task 30 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@393dc09c 20/07/01 11:33:15.542 Executor task launch worker for task 30 INFO Executor: Finished task 16.0 in stage 2.0 (TID 30). 3346 bytes result sent to driver 20/07/01 11:33:15.543 Executor task launch worker for task 30 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.548 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 31 20/07/01 11:33:15.548 Executor task launch worker for task 31 INFO Executor: Running task 17.0 in stage 2.0 (TID 31) 20/07/01 11:33:15.549 Executor task launch worker for task 31 DEBUG Executor: Task 31's epoch is 1 20/07/01 11:33:15.549 Executor task launch worker for task 31 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.553 Executor task launch worker for task 31 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.554 Executor task launch worker for task 31 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 17-18 20/07/01 11:33:15.554 Executor task launch worker for task 31 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.555 Executor task launch worker for task 31 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.555 Executor task launch worker for task 31 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.555 Executor task launch worker for task 31 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.555 Executor task launch worker for task 31 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.558 Executor task launch worker for task 31 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.561 Executor task launch worker for task 31 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.563 Executor task launch worker for task 31 DEBUG TaskMemoryManager: Task 31 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@66fa4ddc 20/07/01 11:33:15.566 Executor task launch worker for task 31 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.566 Executor task launch worker for task 31 DEBUG TaskMemoryManager: Task 31 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@66fa4ddc 20/07/01 11:33:15.568 Executor task launch worker for task 31 INFO Executor: Finished task 17.0 in stage 2.0 (TID 31). 3346 bytes result sent to driver 20/07/01 11:33:15.568 Executor task launch worker for task 31 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.573 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 32 20/07/01 11:33:15.574 Executor task launch worker for task 32 INFO Executor: Running task 18.0 in stage 2.0 (TID 32) 20/07/01 11:33:15.574 Executor task launch worker for task 32 DEBUG Executor: Task 32's epoch is 1 20/07/01 11:33:15.574 Executor task launch worker for task 32 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.577 Executor task launch worker for task 32 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.577 Executor task launch worker for task 32 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 18-19 20/07/01 11:33:15.578 Executor task launch worker for task 32 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.578 Executor task launch worker for task 32 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.578 Executor task launch worker for task 32 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.578 Executor task launch worker for task 32 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.578 Executor task launch worker for task 32 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.581 Executor task launch worker for task 32 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.583 Executor task launch worker for task 32 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.584 Executor task launch worker for task 32 DEBUG TaskMemoryManager: Task 32 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@70f7cbce 20/07/01 11:33:15.586 Executor task launch worker for task 32 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.586 Executor task launch worker for task 32 DEBUG TaskMemoryManager: Task 32 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@70f7cbce 20/07/01 11:33:15.587 Executor task launch worker for task 32 INFO Executor: Finished task 18.0 in stage 2.0 (TID 32). 3346 bytes result sent to driver 20/07/01 11:33:15.588 Executor task launch worker for task 32 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.592 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 33 20/07/01 11:33:15.593 Executor task launch worker for task 33 INFO Executor: Running task 19.0 in stage 2.0 (TID 33) 20/07/01 11:33:15.594 Executor task launch worker for task 33 DEBUG Executor: Task 33's epoch is 1 20/07/01 11:33:15.594 Executor task launch worker for task 33 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.596 Executor task launch worker for task 33 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.596 Executor task launch worker for task 33 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 19-20 20/07/01 11:33:15.597 Executor task launch worker for task 33 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.597 Executor task launch worker for task 33 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.597 Executor task launch worker for task 33 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.597 Executor task launch worker for task 33 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.597 Executor task launch worker for task 33 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.599 Executor task launch worker for task 33 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.602 Executor task launch worker for task 33 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.603 Executor task launch worker for task 33 DEBUG TaskMemoryManager: Task 33 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@7f614ced 20/07/01 11:33:15.605 Executor task launch worker for task 33 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.605 Executor task launch worker for task 33 DEBUG TaskMemoryManager: Task 33 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@7f614ced 20/07/01 11:33:15.607 Executor task launch worker for task 33 INFO Executor: Finished task 19.0 in stage 2.0 (TID 33). 3346 bytes result sent to driver 20/07/01 11:33:15.607 Executor task launch worker for task 33 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.612 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 34 20/07/01 11:33:15.613 Executor task launch worker for task 34 INFO Executor: Running task 20.0 in stage 2.0 (TID 34) 20/07/01 11:33:15.614 Executor task launch worker for task 34 DEBUG Executor: Task 34's epoch is 1 20/07/01 11:33:15.614 Executor task launch worker for task 34 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.618 Executor task launch worker for task 34 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.618 Executor task launch worker for task 34 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 20-21 20/07/01 11:33:15.619 Executor task launch worker for task 34 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.619 Executor task launch worker for task 34 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.619 Executor task launch worker for task 34 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.619 Executor task launch worker for task 34 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.619 Executor task launch worker for task 34 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.622 Executor task launch worker for task 34 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.625 Executor task launch worker for task 34 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.626 Executor task launch worker for task 34 DEBUG TaskMemoryManager: Task 34 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@4bad70d2 20/07/01 11:33:15.628 Executor task launch worker for task 34 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.628 Executor task launch worker for task 34 DEBUG TaskMemoryManager: Task 34 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@4bad70d2 20/07/01 11:33:15.629 Executor task launch worker for task 34 INFO Executor: Finished task 20.0 in stage 2.0 (TID 34). 3346 bytes result sent to driver 20/07/01 11:33:15.629 Executor task launch worker for task 34 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.634 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 35 20/07/01 11:33:15.635 Executor task launch worker for task 35 INFO Executor: Running task 21.0 in stage 2.0 (TID 35) 20/07/01 11:33:15.636 Executor task launch worker for task 35 DEBUG Executor: Task 35's epoch is 1 20/07/01 11:33:15.636 Executor task launch worker for task 35 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.638 Executor task launch worker for task 35 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.639 Executor task launch worker for task 35 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 21-22 20/07/01 11:33:15.639 Executor task launch worker for task 35 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.639 Executor task launch worker for task 35 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.639 Executor task launch worker for task 35 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.639 Executor task launch worker for task 35 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.639 Executor task launch worker for task 35 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.641 Executor task launch worker for task 35 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.643 Executor task launch worker for task 35 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.644 Executor task launch worker for task 35 DEBUG TaskMemoryManager: Task 35 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@60dcc05f 20/07/01 11:33:15.646 Executor task launch worker for task 35 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.647 Executor task launch worker for task 35 DEBUG TaskMemoryManager: Task 35 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@60dcc05f 20/07/01 11:33:15.648 Executor task launch worker for task 35 INFO Executor: Finished task 21.0 in stage 2.0 (TID 35). 3346 bytes result sent to driver 20/07/01 11:33:15.648 Executor task launch worker for task 35 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.653 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 36 20/07/01 11:33:15.653 Executor task launch worker for task 36 INFO Executor: Running task 22.0 in stage 2.0 (TID 36) 20/07/01 11:33:15.654 Executor task launch worker for task 36 DEBUG Executor: Task 36's epoch is 1 20/07/01 11:33:15.654 Executor task launch worker for task 36 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.657 Executor task launch worker for task 36 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.657 Executor task launch worker for task 36 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 22-23 20/07/01 11:33:15.657 Executor task launch worker for task 36 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.658 Executor task launch worker for task 36 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.658 Executor task launch worker for task 36 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.658 Executor task launch worker for task 36 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.658 Executor task launch worker for task 36 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.660 Executor task launch worker for task 36 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.662 Executor task launch worker for task 36 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.663 Executor task launch worker for task 36 DEBUG TaskMemoryManager: Task 36 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@4ff57d31 20/07/01 11:33:15.664 Executor task launch worker for task 36 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.665 Executor task launch worker for task 36 DEBUG TaskMemoryManager: Task 36 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@4ff57d31 20/07/01 11:33:15.666 Executor task launch worker for task 36 INFO Executor: Finished task 22.0 in stage 2.0 (TID 36). 3346 bytes result sent to driver 20/07/01 11:33:15.666 Executor task launch worker for task 36 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.670 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 37 20/07/01 11:33:15.670 Executor task launch worker for task 37 INFO Executor: Running task 23.0 in stage 2.0 (TID 37) 20/07/01 11:33:15.671 Executor task launch worker for task 37 DEBUG Executor: Task 37's epoch is 1 20/07/01 11:33:15.671 Executor task launch worker for task 37 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.675 Executor task launch worker for task 37 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.675 Executor task launch worker for task 37 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 23-24 20/07/01 11:33:15.675 Executor task launch worker for task 37 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.675 Executor task launch worker for task 37 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.675 Executor task launch worker for task 37 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.675 Executor task launch worker for task 37 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.675 Executor task launch worker for task 37 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.677 Executor task launch worker for task 37 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.679 Executor task launch worker for task 37 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.680 Executor task launch worker for task 37 DEBUG TaskMemoryManager: Task 37 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@67b76ffe 20/07/01 11:33:15.681 Executor task launch worker for task 37 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.682 Executor task launch worker for task 37 DEBUG TaskMemoryManager: Task 37 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@67b76ffe 20/07/01 11:33:15.683 Executor task launch worker for task 37 INFO Executor: Finished task 23.0 in stage 2.0 (TID 37). 3346 bytes result sent to driver 20/07/01 11:33:15.683 Executor task launch worker for task 37 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.688 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 38 20/07/01 11:33:15.688 Executor task launch worker for task 38 INFO Executor: Running task 24.0 in stage 2.0 (TID 38) 20/07/01 11:33:15.689 Executor task launch worker for task 38 DEBUG Executor: Task 38's epoch is 1 20/07/01 11:33:15.689 Executor task launch worker for task 38 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.692 Executor task launch worker for task 38 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.692 Executor task launch worker for task 38 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 24-25 20/07/01 11:33:15.692 Executor task launch worker for task 38 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.693 Executor task launch worker for task 38 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.693 Executor task launch worker for task 38 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.693 Executor task launch worker for task 38 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.693 Executor task launch worker for task 38 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.695 Executor task launch worker for task 38 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.697 Executor task launch worker for task 38 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.697 Executor task launch worker for task 38 DEBUG TaskMemoryManager: Task 38 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@253b0b00 20/07/01 11:33:15.699 Executor task launch worker for task 38 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.700 Executor task launch worker for task 38 DEBUG TaskMemoryManager: Task 38 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@253b0b00 20/07/01 11:33:15.701 Executor task launch worker for task 38 INFO Executor: Finished task 24.0 in stage 2.0 (TID 38). 3346 bytes result sent to driver 20/07/01 11:33:15.701 Executor task launch worker for task 38 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.706 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 39 20/07/01 11:33:15.707 Executor task launch worker for task 39 INFO Executor: Running task 25.0 in stage 2.0 (TID 39) 20/07/01 11:33:15.707 Executor task launch worker for task 39 DEBUG Executor: Task 39's epoch is 1 20/07/01 11:33:15.707 Executor task launch worker for task 39 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.711 Executor task launch worker for task 39 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.711 Executor task launch worker for task 39 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 25-26 20/07/01 11:33:15.712 Executor task launch worker for task 39 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.712 Executor task launch worker for task 39 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.712 Executor task launch worker for task 39 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.712 Executor task launch worker for task 39 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.712 Executor task launch worker for task 39 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.715 Executor task launch worker for task 39 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.717 Executor task launch worker for task 39 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.718 Executor task launch worker for task 39 DEBUG TaskMemoryManager: Task 39 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@214dfed9 20/07/01 11:33:15.720 Executor task launch worker for task 39 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.721 Executor task launch worker for task 39 DEBUG TaskMemoryManager: Task 39 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@214dfed9 20/07/01 11:33:15.722 Executor task launch worker for task 39 INFO Executor: Finished task 25.0 in stage 2.0 (TID 39). 3346 bytes result sent to driver 20/07/01 11:33:15.723 Executor task launch worker for task 39 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.728 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 40 20/07/01 11:33:15.729 Executor task launch worker for task 40 INFO Executor: Running task 26.0 in stage 2.0 (TID 40) 20/07/01 11:33:15.730 Executor task launch worker for task 40 DEBUG Executor: Task 40's epoch is 1 20/07/01 11:33:15.730 Executor task launch worker for task 40 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.733 Executor task launch worker for task 40 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.733 Executor task launch worker for task 40 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 26-27 20/07/01 11:33:15.733 Executor task launch worker for task 40 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.733 Executor task launch worker for task 40 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.733 Executor task launch worker for task 40 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.733 Executor task launch worker for task 40 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.733 Executor task launch worker for task 40 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.735 Executor task launch worker for task 40 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.737 Executor task launch worker for task 40 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.737 Executor task launch worker for task 40 DEBUG TaskMemoryManager: Task 40 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@16257b9f 20/07/01 11:33:15.739 Executor task launch worker for task 40 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.739 Executor task launch worker for task 40 DEBUG TaskMemoryManager: Task 40 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@16257b9f 20/07/01 11:33:15.740 Executor task launch worker for task 40 INFO Executor: Finished task 26.0 in stage 2.0 (TID 40). 3346 bytes result sent to driver 20/07/01 11:33:15.741 Executor task launch worker for task 40 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.746 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 41 20/07/01 11:33:15.747 Executor task launch worker for task 41 INFO Executor: Running task 27.0 in stage 2.0 (TID 41) 20/07/01 11:33:15.748 Executor task launch worker for task 41 DEBUG Executor: Task 41's epoch is 1 20/07/01 11:33:15.748 Executor task launch worker for task 41 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.751 Executor task launch worker for task 41 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.752 Executor task launch worker for task 41 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 27-28 20/07/01 11:33:15.752 Executor task launch worker for task 41 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.752 Executor task launch worker for task 41 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.753 Executor task launch worker for task 41 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.753 Executor task launch worker for task 41 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.753 Executor task launch worker for task 41 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.755 Executor task launch worker for task 41 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.757 Executor task launch worker for task 41 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.758 Executor task launch worker for task 41 DEBUG TaskMemoryManager: Task 41 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@428d37aa 20/07/01 11:33:15.760 Executor task launch worker for task 41 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.761 Executor task launch worker for task 41 DEBUG TaskMemoryManager: Task 41 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@428d37aa 20/07/01 11:33:15.762 Executor task launch worker for task 41 INFO Executor: Finished task 27.0 in stage 2.0 (TID 41). 3346 bytes result sent to driver 20/07/01 11:33:15.763 Executor task launch worker for task 41 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.767 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 42 20/07/01 11:33:15.768 Executor task launch worker for task 42 INFO Executor: Running task 28.0 in stage 2.0 (TID 42) 20/07/01 11:33:15.769 Executor task launch worker for task 42 DEBUG Executor: Task 42's epoch is 1 20/07/01 11:33:15.769 Executor task launch worker for task 42 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.772 Executor task launch worker for task 42 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.772 Executor task launch worker for task 42 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 28-29 20/07/01 11:33:15.773 Executor task launch worker for task 42 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.773 Executor task launch worker for task 42 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.773 Executor task launch worker for task 42 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.773 Executor task launch worker for task 42 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.773 Executor task launch worker for task 42 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.776 Executor task launch worker for task 42 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.779 Executor task launch worker for task 42 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.779 Executor task launch worker for task 42 DEBUG TaskMemoryManager: Task 42 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@51651dd8 20/07/01 11:33:15.781 Executor task launch worker for task 42 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.782 Executor task launch worker for task 42 DEBUG TaskMemoryManager: Task 42 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@51651dd8 20/07/01 11:33:15.783 Executor task launch worker for task 42 INFO Executor: Finished task 28.0 in stage 2.0 (TID 42). 3346 bytes result sent to driver 20/07/01 11:33:15.783 Executor task launch worker for task 42 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.789 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 43 20/07/01 11:33:15.789 Executor task launch worker for task 43 INFO Executor: Running task 29.0 in stage 2.0 (TID 43) 20/07/01 11:33:15.790 Executor task launch worker for task 43 DEBUG Executor: Task 43's epoch is 1 20/07/01 11:33:15.790 Executor task launch worker for task 43 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.793 Executor task launch worker for task 43 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.793 Executor task launch worker for task 43 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 29-30 20/07/01 11:33:15.794 Executor task launch worker for task 43 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.794 Executor task launch worker for task 43 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.794 Executor task launch worker for task 43 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.794 Executor task launch worker for task 43 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.794 Executor task launch worker for task 43 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.796 Executor task launch worker for task 43 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.799 Executor task launch worker for task 43 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.800 Executor task launch worker for task 43 DEBUG TaskMemoryManager: Task 43 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@79245d07 20/07/01 11:33:15.803 Executor task launch worker for task 43 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.803 Executor task launch worker for task 43 DEBUG TaskMemoryManager: Task 43 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@79245d07 20/07/01 11:33:15.806 Executor task launch worker for task 43 INFO Executor: Finished task 29.0 in stage 2.0 (TID 43). 3346 bytes result sent to driver 20/07/01 11:33:15.806 Executor task launch worker for task 43 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.811 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 44 20/07/01 11:33:15.811 Executor task launch worker for task 44 INFO Executor: Running task 30.0 in stage 2.0 (TID 44) 20/07/01 11:33:15.812 Executor task launch worker for task 44 DEBUG Executor: Task 44's epoch is 1 20/07/01 11:33:15.812 Executor task launch worker for task 44 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.816 Executor task launch worker for task 44 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.816 Executor task launch worker for task 44 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 30-31 20/07/01 11:33:15.816 Executor task launch worker for task 44 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.817 Executor task launch worker for task 44 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.817 Executor task launch worker for task 44 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.817 Executor task launch worker for task 44 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.817 Executor task launch worker for task 44 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.819 Executor task launch worker for task 44 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.822 Executor task launch worker for task 44 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.822 Executor task launch worker for task 44 DEBUG TaskMemoryManager: Task 44 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@6bbd7202 20/07/01 11:33:15.824 Executor task launch worker for task 44 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.824 Executor task launch worker for task 44 DEBUG TaskMemoryManager: Task 44 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@6bbd7202 20/07/01 11:33:15.825 Executor task launch worker for task 44 INFO Executor: Finished task 30.0 in stage 2.0 (TID 44). 3346 bytes result sent to driver 20/07/01 11:33:15.825 Executor task launch worker for task 44 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.830 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 45 20/07/01 11:33:15.830 Executor task launch worker for task 45 INFO Executor: Running task 31.0 in stage 2.0 (TID 45) 20/07/01 11:33:15.831 Executor task launch worker for task 45 DEBUG Executor: Task 45's epoch is 1 20/07/01 11:33:15.831 Executor task launch worker for task 45 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.834 Executor task launch worker for task 45 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.834 Executor task launch worker for task 45 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 31-32 20/07/01 11:33:15.835 Executor task launch worker for task 45 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.835 Executor task launch worker for task 45 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.835 Executor task launch worker for task 45 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.835 Executor task launch worker for task 45 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.835 Executor task launch worker for task 45 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.837 Executor task launch worker for task 45 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.839 Executor task launch worker for task 45 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.839 Executor task launch worker for task 45 DEBUG TaskMemoryManager: Task 45 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@590b417 20/07/01 11:33:15.841 Executor task launch worker for task 45 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.841 Executor task launch worker for task 45 DEBUG TaskMemoryManager: Task 45 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@590b417 20/07/01 11:33:15.842 Executor task launch worker for task 45 INFO Executor: Finished task 31.0 in stage 2.0 (TID 45). 3346 bytes result sent to driver 20/07/01 11:33:15.843 Executor task launch worker for task 45 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.847 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 46 20/07/01 11:33:15.848 Executor task launch worker for task 46 INFO Executor: Running task 32.0 in stage 2.0 (TID 46) 20/07/01 11:33:15.848 Executor task launch worker for task 46 DEBUG Executor: Task 46's epoch is 1 20/07/01 11:33:15.848 Executor task launch worker for task 46 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.851 Executor task launch worker for task 46 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.851 Executor task launch worker for task 46 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 32-33 20/07/01 11:33:15.851 Executor task launch worker for task 46 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.852 Executor task launch worker for task 46 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.852 Executor task launch worker for task 46 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.852 Executor task launch worker for task 46 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.852 Executor task launch worker for task 46 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.853 Executor task launch worker for task 46 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.855 Executor task launch worker for task 46 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.855 Executor task launch worker for task 46 DEBUG TaskMemoryManager: Task 46 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@1b740707 20/07/01 11:33:15.857 Executor task launch worker for task 46 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.857 Executor task launch worker for task 46 DEBUG TaskMemoryManager: Task 46 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@1b740707 20/07/01 11:33:15.858 Executor task launch worker for task 46 INFO Executor: Finished task 32.0 in stage 2.0 (TID 46). 3346 bytes result sent to driver 20/07/01 11:33:15.858 Executor task launch worker for task 46 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.863 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 47 20/07/01 11:33:15.863 Executor task launch worker for task 47 INFO Executor: Running task 33.0 in stage 2.0 (TID 47) 20/07/01 11:33:15.864 Executor task launch worker for task 47 DEBUG Executor: Task 47's epoch is 1 20/07/01 11:33:15.864 Executor task launch worker for task 47 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.868 Executor task launch worker for task 47 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.868 Executor task launch worker for task 47 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 33-34 20/07/01 11:33:15.869 Executor task launch worker for task 47 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.869 Executor task launch worker for task 47 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.869 Executor task launch worker for task 47 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.869 Executor task launch worker for task 47 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.869 Executor task launch worker for task 47 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.871 Executor task launch worker for task 47 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.874 Executor task launch worker for task 47 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.874 Executor task launch worker for task 47 DEBUG TaskMemoryManager: Task 47 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@4be3c621 20/07/01 11:33:15.876 Executor task launch worker for task 47 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.877 Executor task launch worker for task 47 DEBUG TaskMemoryManager: Task 47 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@4be3c621 20/07/01 11:33:15.878 Executor task launch worker for task 47 INFO Executor: Finished task 33.0 in stage 2.0 (TID 47). 3346 bytes result sent to driver 20/07/01 11:33:15.878 Executor task launch worker for task 47 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.883 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 48 20/07/01 11:33:15.883 Executor task launch worker for task 48 INFO Executor: Running task 34.0 in stage 2.0 (TID 48) 20/07/01 11:33:15.884 Executor task launch worker for task 48 DEBUG Executor: Task 48's epoch is 1 20/07/01 11:33:15.884 Executor task launch worker for task 48 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.887 Executor task launch worker for task 48 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.887 Executor task launch worker for task 48 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 34-35 20/07/01 11:33:15.888 Executor task launch worker for task 48 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.888 Executor task launch worker for task 48 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.888 Executor task launch worker for task 48 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.888 Executor task launch worker for task 48 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.888 Executor task launch worker for task 48 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.890 Executor task launch worker for task 48 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.892 Executor task launch worker for task 48 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.893 Executor task launch worker for task 48 DEBUG TaskMemoryManager: Task 48 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@26bfaab4 20/07/01 11:33:15.895 Executor task launch worker for task 48 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.895 Executor task launch worker for task 48 DEBUG TaskMemoryManager: Task 48 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@26bfaab4 20/07/01 11:33:15.896 Executor task launch worker for task 48 INFO Executor: Finished task 34.0 in stage 2.0 (TID 48). 3346 bytes result sent to driver 20/07/01 11:33:15.897 Executor task launch worker for task 48 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.903 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 49 20/07/01 11:33:15.903 Executor task launch worker for task 49 INFO Executor: Running task 35.0 in stage 2.0 (TID 49) 20/07/01 11:33:15.904 Executor task launch worker for task 49 DEBUG Executor: Task 49's epoch is 1 20/07/01 11:33:15.904 Executor task launch worker for task 49 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.907 Executor task launch worker for task 49 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.907 Executor task launch worker for task 49 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 35-36 20/07/01 11:33:15.908 Executor task launch worker for task 49 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.908 Executor task launch worker for task 49 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.908 Executor task launch worker for task 49 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.908 Executor task launch worker for task 49 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.908 Executor task launch worker for task 49 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.910 Executor task launch worker for task 49 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.913 Executor task launch worker for task 49 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.914 Executor task launch worker for task 49 DEBUG TaskMemoryManager: Task 49 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@4078e73f 20/07/01 11:33:15.915 Executor task launch worker for task 49 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.915 Executor task launch worker for task 49 DEBUG TaskMemoryManager: Task 49 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@4078e73f 20/07/01 11:33:15.917 Executor task launch worker for task 49 INFO Executor: Finished task 35.0 in stage 2.0 (TID 49). 3346 bytes result sent to driver 20/07/01 11:33:15.917 Executor task launch worker for task 49 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.921 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 50 20/07/01 11:33:15.922 Executor task launch worker for task 50 INFO Executor: Running task 36.0 in stage 2.0 (TID 50) 20/07/01 11:33:15.922 Executor task launch worker for task 50 DEBUG Executor: Task 50's epoch is 1 20/07/01 11:33:15.923 Executor task launch worker for task 50 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.925 Executor task launch worker for task 50 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.925 Executor task launch worker for task 50 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 36-37 20/07/01 11:33:15.926 Executor task launch worker for task 50 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.926 Executor task launch worker for task 50 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.926 Executor task launch worker for task 50 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.926 Executor task launch worker for task 50 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.926 Executor task launch worker for task 50 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.929 Executor task launch worker for task 50 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.931 Executor task launch worker for task 50 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.932 Executor task launch worker for task 50 DEBUG TaskMemoryManager: Task 50 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@5958b6c6 20/07/01 11:33:15.933 Executor task launch worker for task 50 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.934 Executor task launch worker for task 50 DEBUG TaskMemoryManager: Task 50 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@5958b6c6 20/07/01 11:33:15.935 Executor task launch worker for task 50 INFO Executor: Finished task 36.0 in stage 2.0 (TID 50). 3346 bytes result sent to driver 20/07/01 11:33:15.935 Executor task launch worker for task 50 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.940 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 51 20/07/01 11:33:15.941 Executor task launch worker for task 51 INFO Executor: Running task 37.0 in stage 2.0 (TID 51) 20/07/01 11:33:15.941 Executor task launch worker for task 51 DEBUG Executor: Task 51's epoch is 1 20/07/01 11:33:15.941 Executor task launch worker for task 51 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.944 Executor task launch worker for task 51 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.945 Executor task launch worker for task 51 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 37-38 20/07/01 11:33:15.945 Executor task launch worker for task 51 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.945 Executor task launch worker for task 51 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.945 Executor task launch worker for task 51 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.945 Executor task launch worker for task 51 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.945 Executor task launch worker for task 51 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.948 Executor task launch worker for task 51 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.949 Executor task launch worker for task 51 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.949 Executor task launch worker for task 51 DEBUG TaskMemoryManager: Task 51 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@3e39dabe 20/07/01 11:33:15.951 Executor task launch worker for task 51 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.951 Executor task launch worker for task 51 DEBUG TaskMemoryManager: Task 51 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@3e39dabe 20/07/01 11:33:15.952 Executor task launch worker for task 51 INFO Executor: Finished task 37.0 in stage 2.0 (TID 51). 3346 bytes result sent to driver 20/07/01 11:33:15.953 Executor task launch worker for task 51 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.957 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 52 20/07/01 11:33:15.957 Executor task launch worker for task 52 INFO Executor: Running task 38.0 in stage 2.0 (TID 52) 20/07/01 11:33:15.958 Executor task launch worker for task 52 DEBUG Executor: Task 52's epoch is 1 20/07/01 11:33:15.958 Executor task launch worker for task 52 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.960 Executor task launch worker for task 52 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.960 Executor task launch worker for task 52 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 38-39 20/07/01 11:33:15.960 Executor task launch worker for task 52 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.960 Executor task launch worker for task 52 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.960 Executor task launch worker for task 52 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.960 Executor task launch worker for task 52 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.961 Executor task launch worker for task 52 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.962 Executor task launch worker for task 52 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.964 Executor task launch worker for task 52 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.964 Executor task launch worker for task 52 DEBUG TaskMemoryManager: Task 52 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@79a5f2a0 20/07/01 11:33:15.965 Executor task launch worker for task 52 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.965 Executor task launch worker for task 52 DEBUG TaskMemoryManager: Task 52 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@79a5f2a0 20/07/01 11:33:15.966 Executor task launch worker for task 52 INFO Executor: Finished task 38.0 in stage 2.0 (TID 52). 3346 bytes result sent to driver 20/07/01 11:33:15.966 Executor task launch worker for task 52 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.971 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 53 20/07/01 11:33:15.971 Executor task launch worker for task 53 INFO Executor: Running task 39.0 in stage 2.0 (TID 53) 20/07/01 11:33:15.971 Executor task launch worker for task 53 DEBUG Executor: Task 53's epoch is 1 20/07/01 11:33:15.971 Executor task launch worker for task 53 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.974 Executor task launch worker for task 53 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.974 Executor task launch worker for task 53 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 39-40 20/07/01 11:33:15.974 Executor task launch worker for task 53 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.974 Executor task launch worker for task 53 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.974 Executor task launch worker for task 53 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.974 Executor task launch worker for task 53 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.974 Executor task launch worker for task 53 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.976 Executor task launch worker for task 53 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.978 Executor task launch worker for task 53 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.978 Executor task launch worker for task 53 DEBUG TaskMemoryManager: Task 53 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@52b52ffc 20/07/01 11:33:15.979 Executor task launch worker for task 53 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.979 Executor task launch worker for task 53 DEBUG TaskMemoryManager: Task 53 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@52b52ffc 20/07/01 11:33:15.980 Executor task launch worker for task 53 INFO Executor: Finished task 39.0 in stage 2.0 (TID 53). 3346 bytes result sent to driver 20/07/01 11:33:15.981 Executor task launch worker for task 53 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.984 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 54 20/07/01 11:33:15.985 Executor task launch worker for task 54 INFO Executor: Running task 40.0 in stage 2.0 (TID 54) 20/07/01 11:33:15.985 Executor task launch worker for task 54 DEBUG Executor: Task 54's epoch is 1 20/07/01 11:33:15.985 Executor task launch worker for task 54 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.987 Executor task launch worker for task 54 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.987 Executor task launch worker for task 54 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 40-41 20/07/01 11:33:15.988 Executor task launch worker for task 54 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:15.988 Executor task launch worker for task 54 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:15.988 Executor task launch worker for task 54 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:15.988 Executor task launch worker for task 54 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:15.988 Executor task launch worker for task 54 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:15.989 Executor task launch worker for task 54 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:15.991 Executor task launch worker for task 54 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:15.991 Executor task launch worker for task 54 DEBUG TaskMemoryManager: Task 54 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@d92a7c7 20/07/01 11:33:15.992 Executor task launch worker for task 54 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:15.992 Executor task launch worker for task 54 DEBUG TaskMemoryManager: Task 54 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@d92a7c7 20/07/01 11:33:15.993 Executor task launch worker for task 54 INFO Executor: Finished task 40.0 in stage 2.0 (TID 54). 3346 bytes result sent to driver 20/07/01 11:33:15.993 Executor task launch worker for task 54 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:15.997 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 55 20/07/01 11:33:15.997 Executor task launch worker for task 55 INFO Executor: Running task 41.0 in stage 2.0 (TID 55) 20/07/01 11:33:15.997 Executor task launch worker for task 55 DEBUG Executor: Task 55's epoch is 1 20/07/01 11:33:15.997 Executor task launch worker for task 55 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:15.999 Executor task launch worker for task 55 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:15.999 Executor task launch worker for task 55 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 41-42 20/07/01 11:33:16.000 Executor task launch worker for task 55 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.000 Executor task launch worker for task 55 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.000 Executor task launch worker for task 55 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.000 Executor task launch worker for task 55 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.000 Executor task launch worker for task 55 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.001 Executor task launch worker for task 55 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.002 Executor task launch worker for task 55 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.003 Executor task launch worker for task 55 DEBUG TaskMemoryManager: Task 55 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@d9b9fc9 20/07/01 11:33:16.004 Executor task launch worker for task 55 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.004 Executor task launch worker for task 55 DEBUG TaskMemoryManager: Task 55 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@d9b9fc9 20/07/01 11:33:16.004 Executor task launch worker for task 55 INFO Executor: Finished task 41.0 in stage 2.0 (TID 55). 3346 bytes result sent to driver 20/07/01 11:33:16.005 Executor task launch worker for task 55 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.008 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 56 20/07/01 11:33:16.008 Executor task launch worker for task 56 INFO Executor: Running task 42.0 in stage 2.0 (TID 56) 20/07/01 11:33:16.009 Executor task launch worker for task 56 DEBUG Executor: Task 56's epoch is 1 20/07/01 11:33:16.009 Executor task launch worker for task 56 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.011 Executor task launch worker for task 56 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.011 Executor task launch worker for task 56 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 42-43 20/07/01 11:33:16.011 Executor task launch worker for task 56 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.011 Executor task launch worker for task 56 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.011 Executor task launch worker for task 56 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.011 Executor task launch worker for task 56 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.011 Executor task launch worker for task 56 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.012 Executor task launch worker for task 56 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.014 Executor task launch worker for task 56 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.014 Executor task launch worker for task 56 DEBUG TaskMemoryManager: Task 56 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@82083b3 20/07/01 11:33:16.015 Executor task launch worker for task 56 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.015 Executor task launch worker for task 56 DEBUG TaskMemoryManager: Task 56 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@82083b3 20/07/01 11:33:16.016 Executor task launch worker for task 56 INFO Executor: Finished task 42.0 in stage 2.0 (TID 56). 3346 bytes result sent to driver 20/07/01 11:33:16.016 Executor task launch worker for task 56 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.019 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 57 20/07/01 11:33:16.019 Executor task launch worker for task 57 INFO Executor: Running task 44.0 in stage 2.0 (TID 57) 20/07/01 11:33:16.020 Executor task launch worker for task 57 DEBUG Executor: Task 57's epoch is 1 20/07/01 11:33:16.020 Executor task launch worker for task 57 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.022 Executor task launch worker for task 57 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.022 Executor task launch worker for task 57 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 44-45 20/07/01 11:33:16.022 Executor task launch worker for task 57 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.022 Executor task launch worker for task 57 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.022 Executor task launch worker for task 57 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.022 Executor task launch worker for task 57 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.022 Executor task launch worker for task 57 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.024 Executor task launch worker for task 57 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.025 Executor task launch worker for task 57 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.025 Executor task launch worker for task 57 DEBUG TaskMemoryManager: Task 57 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@39549c92 20/07/01 11:33:16.026 Executor task launch worker for task 57 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.026 Executor task launch worker for task 57 DEBUG TaskMemoryManager: Task 57 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@39549c92 20/07/01 11:33:16.027 Executor task launch worker for task 57 INFO Executor: Finished task 44.0 in stage 2.0 (TID 57). 3346 bytes result sent to driver 20/07/01 11:33:16.027 Executor task launch worker for task 57 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.030 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 58 20/07/01 11:33:16.030 Executor task launch worker for task 58 INFO Executor: Running task 45.0 in stage 2.0 (TID 58) 20/07/01 11:33:16.031 Executor task launch worker for task 58 DEBUG Executor: Task 58's epoch is 1 20/07/01 11:33:16.031 Executor task launch worker for task 58 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.033 Executor task launch worker for task 58 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.033 Executor task launch worker for task 58 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 45-46 20/07/01 11:33:16.033 Executor task launch worker for task 58 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.033 Executor task launch worker for task 58 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.033 Executor task launch worker for task 58 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.033 Executor task launch worker for task 58 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.033 Executor task launch worker for task 58 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.035 Executor task launch worker for task 58 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.036 Executor task launch worker for task 58 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.036 Executor task launch worker for task 58 DEBUG TaskMemoryManager: Task 58 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@4c76fba3 20/07/01 11:33:16.037 Executor task launch worker for task 58 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.037 Executor task launch worker for task 58 DEBUG TaskMemoryManager: Task 58 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@4c76fba3 20/07/01 11:33:16.038 Executor task launch worker for task 58 INFO Executor: Finished task 45.0 in stage 2.0 (TID 58). 3346 bytes result sent to driver 20/07/01 11:33:16.038 Executor task launch worker for task 58 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.041 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 59 20/07/01 11:33:16.042 Executor task launch worker for task 59 INFO Executor: Running task 46.0 in stage 2.0 (TID 59) 20/07/01 11:33:16.042 Executor task launch worker for task 59 DEBUG Executor: Task 59's epoch is 1 20/07/01 11:33:16.042 Executor task launch worker for task 59 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.044 Executor task launch worker for task 59 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.044 Executor task launch worker for task 59 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 46-47 20/07/01 11:33:16.045 Executor task launch worker for task 59 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.045 Executor task launch worker for task 59 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.045 Executor task launch worker for task 59 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.045 Executor task launch worker for task 59 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.045 Executor task launch worker for task 59 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.046 Executor task launch worker for task 59 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.048 Executor task launch worker for task 59 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.048 Executor task launch worker for task 59 DEBUG TaskMemoryManager: Task 59 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@31d005a0 20/07/01 11:33:16.049 Executor task launch worker for task 59 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.049 Executor task launch worker for task 59 DEBUG TaskMemoryManager: Task 59 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@31d005a0 20/07/01 11:33:16.050 Executor task launch worker for task 59 INFO Executor: Finished task 46.0 in stage 2.0 (TID 59). 3346 bytes result sent to driver 20/07/01 11:33:16.050 Executor task launch worker for task 59 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.053 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 60 20/07/01 11:33:16.054 Executor task launch worker for task 60 INFO Executor: Running task 47.0 in stage 2.0 (TID 60) 20/07/01 11:33:16.054 Executor task launch worker for task 60 DEBUG Executor: Task 60's epoch is 1 20/07/01 11:33:16.054 Executor task launch worker for task 60 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.056 Executor task launch worker for task 60 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.056 Executor task launch worker for task 60 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 47-48 20/07/01 11:33:16.056 Executor task launch worker for task 60 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.057 Executor task launch worker for task 60 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.057 Executor task launch worker for task 60 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.057 Executor task launch worker for task 60 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.057 Executor task launch worker for task 60 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.058 Executor task launch worker for task 60 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.060 Executor task launch worker for task 60 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.060 Executor task launch worker for task 60 DEBUG TaskMemoryManager: Task 60 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@2824aa4f 20/07/01 11:33:16.061 Executor task launch worker for task 60 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.061 Executor task launch worker for task 60 DEBUG TaskMemoryManager: Task 60 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@2824aa4f 20/07/01 11:33:16.062 Executor task launch worker for task 60 INFO Executor: Finished task 47.0 in stage 2.0 (TID 60). 3346 bytes result sent to driver 20/07/01 11:33:16.062 Executor task launch worker for task 60 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.066 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 61 20/07/01 11:33:16.066 Executor task launch worker for task 61 INFO Executor: Running task 48.0 in stage 2.0 (TID 61) 20/07/01 11:33:16.066 Executor task launch worker for task 61 DEBUG Executor: Task 61's epoch is 1 20/07/01 11:33:16.067 Executor task launch worker for task 61 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.068 Executor task launch worker for task 61 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.068 Executor task launch worker for task 61 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 48-49 20/07/01 11:33:16.069 Executor task launch worker for task 61 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.069 Executor task launch worker for task 61 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.069 Executor task launch worker for task 61 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.069 Executor task launch worker for task 61 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.069 Executor task launch worker for task 61 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.070 Executor task launch worker for task 61 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.072 Executor task launch worker for task 61 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.072 Executor task launch worker for task 61 DEBUG TaskMemoryManager: Task 61 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@e9f4ce8 20/07/01 11:33:16.073 Executor task launch worker for task 61 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.073 Executor task launch worker for task 61 DEBUG TaskMemoryManager: Task 61 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@e9f4ce8 20/07/01 11:33:16.074 Executor task launch worker for task 61 INFO Executor: Finished task 48.0 in stage 2.0 (TID 61). 3346 bytes result sent to driver 20/07/01 11:33:16.074 Executor task launch worker for task 61 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.077 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 62 20/07/01 11:33:16.078 Executor task launch worker for task 62 INFO Executor: Running task 50.0 in stage 2.0 (TID 62) 20/07/01 11:33:16.078 Executor task launch worker for task 62 DEBUG Executor: Task 62's epoch is 1 20/07/01 11:33:16.078 Executor task launch worker for task 62 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.080 Executor task launch worker for task 62 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.081 Executor task launch worker for task 62 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 50-51 20/07/01 11:33:16.081 Executor task launch worker for task 62 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.081 Executor task launch worker for task 62 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.081 Executor task launch worker for task 62 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.081 Executor task launch worker for task 62 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.081 Executor task launch worker for task 62 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.082 Executor task launch worker for task 62 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.084 Executor task launch worker for task 62 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.084 Executor task launch worker for task 62 DEBUG TaskMemoryManager: Task 62 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@13a1c73a 20/07/01 11:33:16.085 Executor task launch worker for task 62 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.085 Executor task launch worker for task 62 DEBUG TaskMemoryManager: Task 62 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@13a1c73a 20/07/01 11:33:16.086 Executor task launch worker for task 62 INFO Executor: Finished task 50.0 in stage 2.0 (TID 62). 3346 bytes result sent to driver 20/07/01 11:33:16.086 Executor task launch worker for task 62 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.090 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 63 20/07/01 11:33:16.090 Executor task launch worker for task 63 INFO Executor: Running task 52.0 in stage 2.0 (TID 63) 20/07/01 11:33:16.091 Executor task launch worker for task 63 DEBUG Executor: Task 63's epoch is 1 20/07/01 11:33:16.091 Executor task launch worker for task 63 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.093 Executor task launch worker for task 63 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.093 Executor task launch worker for task 63 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 52-53 20/07/01 11:33:16.093 Executor task launch worker for task 63 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.093 Executor task launch worker for task 63 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.093 Executor task launch worker for task 63 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.093 Executor task launch worker for task 63 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.093 Executor task launch worker for task 63 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.095 Executor task launch worker for task 63 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.096 Executor task launch worker for task 63 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.096 Executor task launch worker for task 63 DEBUG TaskMemoryManager: Task 63 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@192efc29 20/07/01 11:33:16.097 Executor task launch worker for task 63 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.098 Executor task launch worker for task 63 DEBUG TaskMemoryManager: Task 63 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@192efc29 20/07/01 11:33:16.099 Executor task launch worker for task 63 INFO Executor: Finished task 52.0 in stage 2.0 (TID 63). 3346 bytes result sent to driver 20/07/01 11:33:16.099 Executor task launch worker for task 63 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.103 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 64 20/07/01 11:33:16.103 Executor task launch worker for task 64 INFO Executor: Running task 53.0 in stage 2.0 (TID 64) 20/07/01 11:33:16.104 Executor task launch worker for task 64 DEBUG Executor: Task 64's epoch is 1 20/07/01 11:33:16.104 Executor task launch worker for task 64 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.106 Executor task launch worker for task 64 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.106 Executor task launch worker for task 64 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 53-54 20/07/01 11:33:16.106 Executor task launch worker for task 64 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.107 Executor task launch worker for task 64 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.107 Executor task launch worker for task 64 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.107 Executor task launch worker for task 64 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.107 Executor task launch worker for task 64 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.108 Executor task launch worker for task 64 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.109 Executor task launch worker for task 64 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.110 Executor task launch worker for task 64 DEBUG TaskMemoryManager: Task 64 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@7c25608e 20/07/01 11:33:16.111 Executor task launch worker for task 64 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.112 Executor task launch worker for task 64 DEBUG TaskMemoryManager: Task 64 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@7c25608e 20/07/01 11:33:16.112 Executor task launch worker for task 64 INFO Executor: Finished task 53.0 in stage 2.0 (TID 64). 3346 bytes result sent to driver 20/07/01 11:33:16.113 Executor task launch worker for task 64 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.116 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 65 20/07/01 11:33:16.116 Executor task launch worker for task 65 INFO Executor: Running task 54.0 in stage 2.0 (TID 65) 20/07/01 11:33:16.117 Executor task launch worker for task 65 DEBUG Executor: Task 65's epoch is 1 20/07/01 11:33:16.117 Executor task launch worker for task 65 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.119 Executor task launch worker for task 65 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.119 Executor task launch worker for task 65 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 54-55 20/07/01 11:33:16.119 Executor task launch worker for task 65 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.120 Executor task launch worker for task 65 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.120 Executor task launch worker for task 65 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.120 Executor task launch worker for task 65 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.120 Executor task launch worker for task 65 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.122 Executor task launch worker for task 65 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.124 Executor task launch worker for task 65 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.125 Executor task launch worker for task 65 DEBUG TaskMemoryManager: Task 65 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@1d360fd3 20/07/01 11:33:16.126 Executor task launch worker for task 65 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.126 Executor task launch worker for task 65 DEBUG TaskMemoryManager: Task 65 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@1d360fd3 20/07/01 11:33:16.127 Executor task launch worker for task 65 INFO Executor: Finished task 54.0 in stage 2.0 (TID 65). 3346 bytes result sent to driver 20/07/01 11:33:16.127 Executor task launch worker for task 65 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.130 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 66 20/07/01 11:33:16.130 Executor task launch worker for task 66 INFO Executor: Running task 55.0 in stage 2.0 (TID 66) 20/07/01 11:33:16.131 Executor task launch worker for task 66 DEBUG Executor: Task 66's epoch is 1 20/07/01 11:33:16.131 Executor task launch worker for task 66 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.133 Executor task launch worker for task 66 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.133 Executor task launch worker for task 66 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 55-56 20/07/01 11:33:16.133 Executor task launch worker for task 66 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.133 Executor task launch worker for task 66 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.133 Executor task launch worker for task 66 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.133 Executor task launch worker for task 66 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.133 Executor task launch worker for task 66 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.135 Executor task launch worker for task 66 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.136 Executor task launch worker for task 66 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.136 Executor task launch worker for task 66 DEBUG TaskMemoryManager: Task 66 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@4eef2d6 20/07/01 11:33:16.137 Executor task launch worker for task 66 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.137 Executor task launch worker for task 66 DEBUG TaskMemoryManager: Task 66 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@4eef2d6 20/07/01 11:33:16.138 Executor task launch worker for task 66 INFO Executor: Finished task 55.0 in stage 2.0 (TID 66). 3346 bytes result sent to driver 20/07/01 11:33:16.138 Executor task launch worker for task 66 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.142 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 67 20/07/01 11:33:16.142 Executor task launch worker for task 67 INFO Executor: Running task 56.0 in stage 2.0 (TID 67) 20/07/01 11:33:16.143 Executor task launch worker for task 67 DEBUG Executor: Task 67's epoch is 1 20/07/01 11:33:16.143 Executor task launch worker for task 67 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.145 Executor task launch worker for task 67 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.145 Executor task launch worker for task 67 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 56-57 20/07/01 11:33:16.145 Executor task launch worker for task 67 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.145 Executor task launch worker for task 67 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.145 Executor task launch worker for task 67 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.145 Executor task launch worker for task 67 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.145 Executor task launch worker for task 67 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.147 Executor task launch worker for task 67 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.148 Executor task launch worker for task 67 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.149 Executor task launch worker for task 67 DEBUG TaskMemoryManager: Task 67 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@6c19e69f 20/07/01 11:33:16.149 Executor task launch worker for task 67 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.150 Executor task launch worker for task 67 DEBUG TaskMemoryManager: Task 67 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@6c19e69f 20/07/01 11:33:16.150 Executor task launch worker for task 67 INFO Executor: Finished task 56.0 in stage 2.0 (TID 67). 3346 bytes result sent to driver 20/07/01 11:33:16.151 Executor task launch worker for task 67 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.154 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 68 20/07/01 11:33:16.155 Executor task launch worker for task 68 INFO Executor: Running task 57.0 in stage 2.0 (TID 68) 20/07/01 11:33:16.155 Executor task launch worker for task 68 DEBUG Executor: Task 68's epoch is 1 20/07/01 11:33:16.155 Executor task launch worker for task 68 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.157 Executor task launch worker for task 68 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.157 Executor task launch worker for task 68 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 57-58 20/07/01 11:33:16.158 Executor task launch worker for task 68 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.158 Executor task launch worker for task 68 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.158 Executor task launch worker for task 68 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.158 Executor task launch worker for task 68 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.158 Executor task launch worker for task 68 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.160 Executor task launch worker for task 68 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.161 Executor task launch worker for task 68 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.161 Executor task launch worker for task 68 DEBUG TaskMemoryManager: Task 68 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@7fb50efd 20/07/01 11:33:16.162 Executor task launch worker for task 68 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.162 Executor task launch worker for task 68 DEBUG TaskMemoryManager: Task 68 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@7fb50efd 20/07/01 11:33:16.163 Executor task launch worker for task 68 INFO Executor: Finished task 57.0 in stage 2.0 (TID 68). 3346 bytes result sent to driver 20/07/01 11:33:16.163 Executor task launch worker for task 68 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.167 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 69 20/07/01 11:33:16.168 Executor task launch worker for task 69 INFO Executor: Running task 58.0 in stage 2.0 (TID 69) 20/07/01 11:33:16.168 Executor task launch worker for task 69 DEBUG Executor: Task 69's epoch is 1 20/07/01 11:33:16.168 Executor task launch worker for task 69 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.170 Executor task launch worker for task 69 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.170 Executor task launch worker for task 69 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 58-59 20/07/01 11:33:16.170 Executor task launch worker for task 69 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.171 Executor task launch worker for task 69 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.171 Executor task launch worker for task 69 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.171 Executor task launch worker for task 69 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.171 Executor task launch worker for task 69 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.172 Executor task launch worker for task 69 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.174 Executor task launch worker for task 69 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.174 Executor task launch worker for task 69 DEBUG TaskMemoryManager: Task 69 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@1e41691c 20/07/01 11:33:16.175 Executor task launch worker for task 69 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.175 Executor task launch worker for task 69 DEBUG TaskMemoryManager: Task 69 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@1e41691c 20/07/01 11:33:16.176 Executor task launch worker for task 69 INFO Executor: Finished task 58.0 in stage 2.0 (TID 69). 3346 bytes result sent to driver 20/07/01 11:33:16.176 Executor task launch worker for task 69 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.179 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 70 20/07/01 11:33:16.179 Executor task launch worker for task 70 INFO Executor: Running task 59.0 in stage 2.0 (TID 70) 20/07/01 11:33:16.180 Executor task launch worker for task 70 DEBUG Executor: Task 70's epoch is 1 20/07/01 11:33:16.180 Executor task launch worker for task 70 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.181 Executor task launch worker for task 70 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.181 Executor task launch worker for task 70 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 59-60 20/07/01 11:33:16.182 Executor task launch worker for task 70 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.182 Executor task launch worker for task 70 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.182 Executor task launch worker for task 70 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.182 Executor task launch worker for task 70 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.182 Executor task launch worker for task 70 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.183 Executor task launch worker for task 70 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.185 Executor task launch worker for task 70 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.185 Executor task launch worker for task 70 DEBUG TaskMemoryManager: Task 70 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@11ffabe4 20/07/01 11:33:16.186 Executor task launch worker for task 70 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.186 Executor task launch worker for task 70 DEBUG TaskMemoryManager: Task 70 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@11ffabe4 20/07/01 11:33:16.187 Executor task launch worker for task 70 INFO Executor: Finished task 59.0 in stage 2.0 (TID 70). 3346 bytes result sent to driver 20/07/01 11:33:16.187 Executor task launch worker for task 70 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.190 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 71 20/07/01 11:33:16.190 Executor task launch worker for task 71 INFO Executor: Running task 60.0 in stage 2.0 (TID 71) 20/07/01 11:33:16.191 Executor task launch worker for task 71 DEBUG Executor: Task 71's epoch is 1 20/07/01 11:33:16.191 Executor task launch worker for task 71 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.193 Executor task launch worker for task 71 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.193 Executor task launch worker for task 71 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 60-61 20/07/01 11:33:16.193 Executor task launch worker for task 71 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.193 Executor task launch worker for task 71 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.193 Executor task launch worker for task 71 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.193 Executor task launch worker for task 71 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.193 Executor task launch worker for task 71 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.195 Executor task launch worker for task 71 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.196 Executor task launch worker for task 71 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.196 Executor task launch worker for task 71 DEBUG TaskMemoryManager: Task 71 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@5a1da231 20/07/01 11:33:16.197 Executor task launch worker for task 71 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.197 Executor task launch worker for task 71 DEBUG TaskMemoryManager: Task 71 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@5a1da231 20/07/01 11:33:16.198 Executor task launch worker for task 71 INFO Executor: Finished task 60.0 in stage 2.0 (TID 71). 3346 bytes result sent to driver 20/07/01 11:33:16.198 Executor task launch worker for task 71 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.201 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 72 20/07/01 11:33:16.201 Executor task launch worker for task 72 INFO Executor: Running task 61.0 in stage 2.0 (TID 72) 20/07/01 11:33:16.202 Executor task launch worker for task 72 DEBUG Executor: Task 72's epoch is 1 20/07/01 11:33:16.202 Executor task launch worker for task 72 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.204 Executor task launch worker for task 72 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.204 Executor task launch worker for task 72 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 61-62 20/07/01 11:33:16.204 Executor task launch worker for task 72 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.204 Executor task launch worker for task 72 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.204 Executor task launch worker for task 72 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.204 Executor task launch worker for task 72 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.204 Executor task launch worker for task 72 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.206 Executor task launch worker for task 72 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.207 Executor task launch worker for task 72 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.207 Executor task launch worker for task 72 DEBUG TaskMemoryManager: Task 72 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@3935571 20/07/01 11:33:16.208 Executor task launch worker for task 72 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.208 Executor task launch worker for task 72 DEBUG TaskMemoryManager: Task 72 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@3935571 20/07/01 11:33:16.209 Executor task launch worker for task 72 INFO Executor: Finished task 61.0 in stage 2.0 (TID 72). 3346 bytes result sent to driver 20/07/01 11:33:16.209 Executor task launch worker for task 72 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.213 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 73 20/07/01 11:33:16.213 Executor task launch worker for task 73 INFO Executor: Running task 62.0 in stage 2.0 (TID 73) 20/07/01 11:33:16.214 Executor task launch worker for task 73 DEBUG Executor: Task 73's epoch is 1 20/07/01 11:33:16.214 Executor task launch worker for task 73 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.216 Executor task launch worker for task 73 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.216 Executor task launch worker for task 73 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 62-63 20/07/01 11:33:16.216 Executor task launch worker for task 73 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.217 Executor task launch worker for task 73 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.217 Executor task launch worker for task 73 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.217 Executor task launch worker for task 73 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.217 Executor task launch worker for task 73 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.218 Executor task launch worker for task 73 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.220 Executor task launch worker for task 73 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.220 Executor task launch worker for task 73 DEBUG TaskMemoryManager: Task 73 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@411ead67 20/07/01 11:33:16.221 Executor task launch worker for task 73 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.221 Executor task launch worker for task 73 DEBUG TaskMemoryManager: Task 73 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@411ead67 20/07/01 11:33:16.222 Executor task launch worker for task 73 INFO Executor: Finished task 62.0 in stage 2.0 (TID 73). 3346 bytes result sent to driver 20/07/01 11:33:16.222 Executor task launch worker for task 73 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.226 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 74 20/07/01 11:33:16.227 Executor task launch worker for task 74 INFO Executor: Running task 63.0 in stage 2.0 (TID 74) 20/07/01 11:33:16.227 Executor task launch worker for task 74 DEBUG Executor: Task 74's epoch is 1 20/07/01 11:33:16.227 Executor task launch worker for task 74 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.229 Executor task launch worker for task 74 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.230 Executor task launch worker for task 74 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 63-64 20/07/01 11:33:16.230 Executor task launch worker for task 74 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.230 Executor task launch worker for task 74 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.230 Executor task launch worker for task 74 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.230 Executor task launch worker for task 74 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.230 Executor task launch worker for task 74 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.232 Executor task launch worker for task 74 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.233 Executor task launch worker for task 74 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.233 Executor task launch worker for task 74 DEBUG TaskMemoryManager: Task 74 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@6cd6e0c7 20/07/01 11:33:16.234 Executor task launch worker for task 74 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.235 Executor task launch worker for task 74 DEBUG TaskMemoryManager: Task 74 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@6cd6e0c7 20/07/01 11:33:16.235 Executor task launch worker for task 74 INFO Executor: Finished task 63.0 in stage 2.0 (TID 74). 3346 bytes result sent to driver 20/07/01 11:33:16.236 Executor task launch worker for task 74 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.240 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 75 20/07/01 11:33:16.240 Executor task launch worker for task 75 INFO Executor: Running task 64.0 in stage 2.0 (TID 75) 20/07/01 11:33:16.241 Executor task launch worker for task 75 DEBUG Executor: Task 75's epoch is 1 20/07/01 11:33:16.241 Executor task launch worker for task 75 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.242 Executor task launch worker for task 75 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.242 Executor task launch worker for task 75 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 64-65 20/07/01 11:33:16.243 Executor task launch worker for task 75 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.243 Executor task launch worker for task 75 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.243 Executor task launch worker for task 75 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.243 Executor task launch worker for task 75 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.243 Executor task launch worker for task 75 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.244 Executor task launch worker for task 75 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.246 Executor task launch worker for task 75 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.246 Executor task launch worker for task 75 DEBUG TaskMemoryManager: Task 75 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@752fe1a6 20/07/01 11:33:16.247 Executor task launch worker for task 75 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.247 Executor task launch worker for task 75 DEBUG TaskMemoryManager: Task 75 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@752fe1a6 20/07/01 11:33:16.248 Executor task launch worker for task 75 INFO Executor: Finished task 64.0 in stage 2.0 (TID 75). 3346 bytes result sent to driver 20/07/01 11:33:16.248 Executor task launch worker for task 75 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.251 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 76 20/07/01 11:33:16.252 Executor task launch worker for task 76 INFO Executor: Running task 65.0 in stage 2.0 (TID 76) 20/07/01 11:33:16.252 Executor task launch worker for task 76 DEBUG Executor: Task 76's epoch is 1 20/07/01 11:33:16.252 Executor task launch worker for task 76 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.254 Executor task launch worker for task 76 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.254 Executor task launch worker for task 76 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 65-66 20/07/01 11:33:16.254 Executor task launch worker for task 76 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.254 Executor task launch worker for task 76 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.254 Executor task launch worker for task 76 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.254 Executor task launch worker for task 76 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.254 Executor task launch worker for task 76 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.256 Executor task launch worker for task 76 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.257 Executor task launch worker for task 76 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.257 Executor task launch worker for task 76 DEBUG TaskMemoryManager: Task 76 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@5a14cb91 20/07/01 11:33:16.258 Executor task launch worker for task 76 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.258 Executor task launch worker for task 76 DEBUG TaskMemoryManager: Task 76 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@5a14cb91 20/07/01 11:33:16.259 Executor task launch worker for task 76 INFO Executor: Finished task 65.0 in stage 2.0 (TID 76). 3346 bytes result sent to driver 20/07/01 11:33:16.259 Executor task launch worker for task 76 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.263 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 77 20/07/01 11:33:16.263 Executor task launch worker for task 77 INFO Executor: Running task 67.0 in stage 2.0 (TID 77) 20/07/01 11:33:16.264 Executor task launch worker for task 77 DEBUG Executor: Task 77's epoch is 1 20/07/01 11:33:16.264 Executor task launch worker for task 77 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.266 Executor task launch worker for task 77 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.266 Executor task launch worker for task 77 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 67-68 20/07/01 11:33:16.266 Executor task launch worker for task 77 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.266 Executor task launch worker for task 77 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.266 Executor task launch worker for task 77 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.266 Executor task launch worker for task 77 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.266 Executor task launch worker for task 77 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.267 Executor task launch worker for task 77 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.269 Executor task launch worker for task 77 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.269 Executor task launch worker for task 77 DEBUG TaskMemoryManager: Task 77 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@6f022393 20/07/01 11:33:16.270 Executor task launch worker for task 77 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.270 Executor task launch worker for task 77 DEBUG TaskMemoryManager: Task 77 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@6f022393 20/07/01 11:33:16.270 Executor task launch worker for task 77 INFO Executor: Finished task 67.0 in stage 2.0 (TID 77). 3346 bytes result sent to driver 20/07/01 11:33:16.271 Executor task launch worker for task 77 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.275 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 78 20/07/01 11:33:16.275 Executor task launch worker for task 78 INFO Executor: Running task 68.0 in stage 2.0 (TID 78) 20/07/01 11:33:16.276 Executor task launch worker for task 78 DEBUG Executor: Task 78's epoch is 1 20/07/01 11:33:16.276 Executor task launch worker for task 78 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.278 Executor task launch worker for task 78 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.278 Executor task launch worker for task 78 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 68-69 20/07/01 11:33:16.278 Executor task launch worker for task 78 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.278 Executor task launch worker for task 78 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.278 Executor task launch worker for task 78 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.278 Executor task launch worker for task 78 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.278 Executor task launch worker for task 78 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.280 Executor task launch worker for task 78 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.281 Executor task launch worker for task 78 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.281 Executor task launch worker for task 78 DEBUG TaskMemoryManager: Task 78 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@19c9d9b3 20/07/01 11:33:16.282 Executor task launch worker for task 78 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.282 Executor task launch worker for task 78 DEBUG TaskMemoryManager: Task 78 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@19c9d9b3 20/07/01 11:33:16.283 Executor task launch worker for task 78 INFO Executor: Finished task 68.0 in stage 2.0 (TID 78). 3346 bytes result sent to driver 20/07/01 11:33:16.283 Executor task launch worker for task 78 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.286 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 79 20/07/01 11:33:16.286 Executor task launch worker for task 79 INFO Executor: Running task 69.0 in stage 2.0 (TID 79) 20/07/01 11:33:16.286 Executor task launch worker for task 79 DEBUG Executor: Task 79's epoch is 1 20/07/01 11:33:16.286 Executor task launch worker for task 79 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.288 Executor task launch worker for task 79 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.288 Executor task launch worker for task 79 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 69-70 20/07/01 11:33:16.288 Executor task launch worker for task 79 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.288 Executor task launch worker for task 79 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.288 Executor task launch worker for task 79 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.288 Executor task launch worker for task 79 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.288 Executor task launch worker for task 79 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.289 Executor task launch worker for task 79 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.290 Executor task launch worker for task 79 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.290 Executor task launch worker for task 79 DEBUG TaskMemoryManager: Task 79 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@3054ceb6 20/07/01 11:33:16.291 Executor task launch worker for task 79 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.291 Executor task launch worker for task 79 DEBUG TaskMemoryManager: Task 79 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@3054ceb6 20/07/01 11:33:16.291 Executor task launch worker for task 79 INFO Executor: Finished task 69.0 in stage 2.0 (TID 79). 3346 bytes result sent to driver 20/07/01 11:33:16.292 Executor task launch worker for task 79 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.294 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 80 20/07/01 11:33:16.294 Executor task launch worker for task 80 INFO Executor: Running task 70.0 in stage 2.0 (TID 80) 20/07/01 11:33:16.295 Executor task launch worker for task 80 DEBUG Executor: Task 80's epoch is 1 20/07/01 11:33:16.295 Executor task launch worker for task 80 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.296 Executor task launch worker for task 80 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.296 Executor task launch worker for task 80 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 70-71 20/07/01 11:33:16.296 Executor task launch worker for task 80 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.296 Executor task launch worker for task 80 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.296 Executor task launch worker for task 80 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.296 Executor task launch worker for task 80 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.296 Executor task launch worker for task 80 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.297 Executor task launch worker for task 80 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.298 Executor task launch worker for task 80 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.298 Executor task launch worker for task 80 DEBUG TaskMemoryManager: Task 80 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@c3134e7 20/07/01 11:33:16.299 Executor task launch worker for task 80 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.299 Executor task launch worker for task 80 DEBUG TaskMemoryManager: Task 80 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@c3134e7 20/07/01 11:33:16.300 Executor task launch worker for task 80 INFO Executor: Finished task 70.0 in stage 2.0 (TID 80). 3346 bytes result sent to driver 20/07/01 11:33:16.300 Executor task launch worker for task 80 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.302 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 81 20/07/01 11:33:16.303 Executor task launch worker for task 81 INFO Executor: Running task 71.0 in stage 2.0 (TID 81) 20/07/01 11:33:16.303 Executor task launch worker for task 81 DEBUG Executor: Task 81's epoch is 1 20/07/01 11:33:16.303 Executor task launch worker for task 81 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.304 Executor task launch worker for task 81 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.304 Executor task launch worker for task 81 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 71-72 20/07/01 11:33:16.305 Executor task launch worker for task 81 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.305 Executor task launch worker for task 81 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.305 Executor task launch worker for task 81 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.305 Executor task launch worker for task 81 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.305 Executor task launch worker for task 81 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.306 Executor task launch worker for task 81 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.307 Executor task launch worker for task 81 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.307 Executor task launch worker for task 81 DEBUG TaskMemoryManager: Task 81 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@53c9653f 20/07/01 11:33:16.308 Executor task launch worker for task 81 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.309 Executor task launch worker for task 81 DEBUG TaskMemoryManager: Task 81 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@53c9653f 20/07/01 11:33:16.309 Executor task launch worker for task 81 INFO Executor: Finished task 71.0 in stage 2.0 (TID 81). 3346 bytes result sent to driver 20/07/01 11:33:16.310 Executor task launch worker for task 81 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.314 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 82 20/07/01 11:33:16.314 Executor task launch worker for task 82 INFO Executor: Running task 72.0 in stage 2.0 (TID 82) 20/07/01 11:33:16.315 Executor task launch worker for task 82 DEBUG Executor: Task 82's epoch is 1 20/07/01 11:33:16.315 Executor task launch worker for task 82 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.317 Executor task launch worker for task 82 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.317 Executor task launch worker for task 82 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 72-73 20/07/01 11:33:16.318 Executor task launch worker for task 82 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.318 Executor task launch worker for task 82 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.318 Executor task launch worker for task 82 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.318 Executor task launch worker for task 82 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.318 Executor task launch worker for task 82 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.320 Executor task launch worker for task 82 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.323 Executor task launch worker for task 82 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.323 Executor task launch worker for task 82 DEBUG TaskMemoryManager: Task 82 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@45401fcd 20/07/01 11:33:16.325 Executor task launch worker for task 82 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.326 Executor task launch worker for task 82 DEBUG TaskMemoryManager: Task 82 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@45401fcd 20/07/01 11:33:16.327 Executor task launch worker for task 82 INFO Executor: Finished task 72.0 in stage 2.0 (TID 82). 3346 bytes result sent to driver 20/07/01 11:33:16.327 Executor task launch worker for task 82 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.331 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 83 20/07/01 11:33:16.331 Executor task launch worker for task 83 INFO Executor: Running task 73.0 in stage 2.0 (TID 83) 20/07/01 11:33:16.331 Executor task launch worker for task 83 DEBUG Executor: Task 83's epoch is 1 20/07/01 11:33:16.331 Executor task launch worker for task 83 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.333 Executor task launch worker for task 83 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.333 Executor task launch worker for task 83 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 73-74 20/07/01 11:33:16.334 Executor task launch worker for task 83 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.334 Executor task launch worker for task 83 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.334 Executor task launch worker for task 83 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.334 Executor task launch worker for task 83 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.334 Executor task launch worker for task 83 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.335 Executor task launch worker for task 83 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.337 Executor task launch worker for task 83 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.337 Executor task launch worker for task 83 DEBUG TaskMemoryManager: Task 83 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@36ef928f 20/07/01 11:33:16.338 Executor task launch worker for task 83 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.338 Executor task launch worker for task 83 DEBUG TaskMemoryManager: Task 83 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@36ef928f 20/07/01 11:33:16.339 Executor task launch worker for task 83 INFO Executor: Finished task 73.0 in stage 2.0 (TID 83). 3346 bytes result sent to driver 20/07/01 11:33:16.339 Executor task launch worker for task 83 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.343 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 84 20/07/01 11:33:16.343 Executor task launch worker for task 84 INFO Executor: Running task 74.0 in stage 2.0 (TID 84) 20/07/01 11:33:16.343 Executor task launch worker for task 84 DEBUG Executor: Task 84's epoch is 1 20/07/01 11:33:16.343 Executor task launch worker for task 84 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.345 Executor task launch worker for task 84 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.345 Executor task launch worker for task 84 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 74-75 20/07/01 11:33:16.346 Executor task launch worker for task 84 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.346 Executor task launch worker for task 84 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.346 Executor task launch worker for task 84 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.346 Executor task launch worker for task 84 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.346 Executor task launch worker for task 84 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.347 Executor task launch worker for task 84 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.349 Executor task launch worker for task 84 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.349 Executor task launch worker for task 84 DEBUG TaskMemoryManager: Task 84 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@35da6d8a 20/07/01 11:33:16.350 Executor task launch worker for task 84 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.350 Executor task launch worker for task 84 DEBUG TaskMemoryManager: Task 84 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@35da6d8a 20/07/01 11:33:16.351 Executor task launch worker for task 84 INFO Executor: Finished task 74.0 in stage 2.0 (TID 84). 3346 bytes result sent to driver 20/07/01 11:33:16.351 Executor task launch worker for task 84 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.355 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 85 20/07/01 11:33:16.355 Executor task launch worker for task 85 INFO Executor: Running task 75.0 in stage 2.0 (TID 85) 20/07/01 11:33:16.355 Executor task launch worker for task 85 DEBUG Executor: Task 85's epoch is 1 20/07/01 11:33:16.355 Executor task launch worker for task 85 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.357 Executor task launch worker for task 85 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.357 Executor task launch worker for task 85 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 75-76 20/07/01 11:33:16.357 Executor task launch worker for task 85 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.358 Executor task launch worker for task 85 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.358 Executor task launch worker for task 85 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.358 Executor task launch worker for task 85 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.358 Executor task launch worker for task 85 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.359 Executor task launch worker for task 85 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.360 Executor task launch worker for task 85 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.360 Executor task launch worker for task 85 DEBUG TaskMemoryManager: Task 85 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@1c1596e7 20/07/01 11:33:16.361 Executor task launch worker for task 85 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.361 Executor task launch worker for task 85 DEBUG TaskMemoryManager: Task 85 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@1c1596e7 20/07/01 11:33:16.362 Executor task launch worker for task 85 INFO Executor: Finished task 75.0 in stage 2.0 (TID 85). 3346 bytes result sent to driver 20/07/01 11:33:16.362 Executor task launch worker for task 85 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.366 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 86 20/07/01 11:33:16.366 Executor task launch worker for task 86 INFO Executor: Running task 76.0 in stage 2.0 (TID 86) 20/07/01 11:33:16.367 Executor task launch worker for task 86 DEBUG Executor: Task 86's epoch is 1 20/07/01 11:33:16.367 Executor task launch worker for task 86 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.369 Executor task launch worker for task 86 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.369 Executor task launch worker for task 86 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 76-77 20/07/01 11:33:16.369 Executor task launch worker for task 86 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.369 Executor task launch worker for task 86 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.370 Executor task launch worker for task 86 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.370 Executor task launch worker for task 86 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.370 Executor task launch worker for task 86 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.371 Executor task launch worker for task 86 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.372 Executor task launch worker for task 86 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.373 Executor task launch worker for task 86 DEBUG TaskMemoryManager: Task 86 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@e83a89b 20/07/01 11:33:16.374 Executor task launch worker for task 86 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.374 Executor task launch worker for task 86 DEBUG TaskMemoryManager: Task 86 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@e83a89b 20/07/01 11:33:16.375 Executor task launch worker for task 86 INFO Executor: Finished task 76.0 in stage 2.0 (TID 86). 3346 bytes result sent to driver 20/07/01 11:33:16.375 Executor task launch worker for task 86 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.378 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 87 20/07/01 11:33:16.379 Executor task launch worker for task 87 INFO Executor: Running task 77.0 in stage 2.0 (TID 87) 20/07/01 11:33:16.379 Executor task launch worker for task 87 DEBUG Executor: Task 87's epoch is 1 20/07/01 11:33:16.379 Executor task launch worker for task 87 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.381 Executor task launch worker for task 87 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.381 Executor task launch worker for task 87 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 77-78 20/07/01 11:33:16.381 Executor task launch worker for task 87 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.381 Executor task launch worker for task 87 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.381 Executor task launch worker for task 87 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.381 Executor task launch worker for task 87 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.381 Executor task launch worker for task 87 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.383 Executor task launch worker for task 87 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.384 Executor task launch worker for task 87 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.384 Executor task launch worker for task 87 DEBUG TaskMemoryManager: Task 87 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@5879aa0 20/07/01 11:33:16.385 Executor task launch worker for task 87 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.385 Executor task launch worker for task 87 DEBUG TaskMemoryManager: Task 87 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@5879aa0 20/07/01 11:33:16.385 Executor task launch worker for task 87 INFO Executor: Finished task 77.0 in stage 2.0 (TID 87). 3346 bytes result sent to driver 20/07/01 11:33:16.385 Executor task launch worker for task 87 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.388 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 88 20/07/01 11:33:16.389 Executor task launch worker for task 88 INFO Executor: Running task 78.0 in stage 2.0 (TID 88) 20/07/01 11:33:16.389 Executor task launch worker for task 88 DEBUG Executor: Task 88's epoch is 1 20/07/01 11:33:16.389 Executor task launch worker for task 88 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.391 Executor task launch worker for task 88 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.391 Executor task launch worker for task 88 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 78-79 20/07/01 11:33:16.391 Executor task launch worker for task 88 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.391 Executor task launch worker for task 88 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.391 Executor task launch worker for task 88 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.391 Executor task launch worker for task 88 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.391 Executor task launch worker for task 88 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.392 Executor task launch worker for task 88 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.393 Executor task launch worker for task 88 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.394 Executor task launch worker for task 88 DEBUG TaskMemoryManager: Task 88 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@5bb3373f 20/07/01 11:33:16.394 Executor task launch worker for task 88 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.395 Executor task launch worker for task 88 DEBUG TaskMemoryManager: Task 88 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@5bb3373f 20/07/01 11:33:16.395 Executor task launch worker for task 88 INFO Executor: Finished task 78.0 in stage 2.0 (TID 88). 3346 bytes result sent to driver 20/07/01 11:33:16.395 Executor task launch worker for task 88 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.399 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 89 20/07/01 11:33:16.399 Executor task launch worker for task 89 INFO Executor: Running task 79.0 in stage 2.0 (TID 89) 20/07/01 11:33:16.400 Executor task launch worker for task 89 DEBUG Executor: Task 89's epoch is 1 20/07/01 11:33:16.400 Executor task launch worker for task 89 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.402 Executor task launch worker for task 89 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.402 Executor task launch worker for task 89 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 79-80 20/07/01 11:33:16.402 Executor task launch worker for task 89 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.402 Executor task launch worker for task 89 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.402 Executor task launch worker for task 89 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.402 Executor task launch worker for task 89 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.402 Executor task launch worker for task 89 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.403 Executor task launch worker for task 89 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.404 Executor task launch worker for task 89 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.404 Executor task launch worker for task 89 DEBUG TaskMemoryManager: Task 89 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@39f1f9f6 20/07/01 11:33:16.405 Executor task launch worker for task 89 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.405 Executor task launch worker for task 89 DEBUG TaskMemoryManager: Task 89 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@39f1f9f6 20/07/01 11:33:16.406 Executor task launch worker for task 89 INFO Executor: Finished task 79.0 in stage 2.0 (TID 89). 3346 bytes result sent to driver 20/07/01 11:33:16.406 Executor task launch worker for task 89 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.410 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 90 20/07/01 11:33:16.410 Executor task launch worker for task 90 INFO Executor: Running task 80.0 in stage 2.0 (TID 90) 20/07/01 11:33:16.410 Executor task launch worker for task 90 DEBUG Executor: Task 90's epoch is 1 20/07/01 11:33:16.410 Executor task launch worker for task 90 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.412 Executor task launch worker for task 90 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.412 Executor task launch worker for task 90 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 80-81 20/07/01 11:33:16.413 Executor task launch worker for task 90 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.413 Executor task launch worker for task 90 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.413 Executor task launch worker for task 90 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.413 Executor task launch worker for task 90 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.413 Executor task launch worker for task 90 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.414 Executor task launch worker for task 90 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.415 Executor task launch worker for task 90 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.415 Executor task launch worker for task 90 DEBUG TaskMemoryManager: Task 90 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@4832c982 20/07/01 11:33:16.416 Executor task launch worker for task 90 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.416 Executor task launch worker for task 90 DEBUG TaskMemoryManager: Task 90 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@4832c982 20/07/01 11:33:16.417 Executor task launch worker for task 90 INFO Executor: Finished task 80.0 in stage 2.0 (TID 90). 3346 bytes result sent to driver 20/07/01 11:33:16.417 Executor task launch worker for task 90 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.421 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 91 20/07/01 11:33:16.421 Executor task launch worker for task 91 INFO Executor: Running task 81.0 in stage 2.0 (TID 91) 20/07/01 11:33:16.421 Executor task launch worker for task 91 DEBUG Executor: Task 91's epoch is 1 20/07/01 11:33:16.421 Executor task launch worker for task 91 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.423 Executor task launch worker for task 91 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.423 Executor task launch worker for task 91 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 81-82 20/07/01 11:33:16.423 Executor task launch worker for task 91 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.424 Executor task launch worker for task 91 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.424 Executor task launch worker for task 91 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.424 Executor task launch worker for task 91 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.424 Executor task launch worker for task 91 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.425 Executor task launch worker for task 91 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.426 Executor task launch worker for task 91 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.426 Executor task launch worker for task 91 DEBUG TaskMemoryManager: Task 91 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@6486f55 20/07/01 11:33:16.427 Executor task launch worker for task 91 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.427 Executor task launch worker for task 91 DEBUG TaskMemoryManager: Task 91 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@6486f55 20/07/01 11:33:16.428 Executor task launch worker for task 91 INFO Executor: Finished task 81.0 in stage 2.0 (TID 91). 3346 bytes result sent to driver 20/07/01 11:33:16.428 Executor task launch worker for task 91 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.431 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 92 20/07/01 11:33:16.432 Executor task launch worker for task 92 INFO Executor: Running task 82.0 in stage 2.0 (TID 92) 20/07/01 11:33:16.432 Executor task launch worker for task 92 DEBUG Executor: Task 92's epoch is 1 20/07/01 11:33:16.432 Executor task launch worker for task 92 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.434 Executor task launch worker for task 92 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.434 Executor task launch worker for task 92 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 82-83 20/07/01 11:33:16.434 Executor task launch worker for task 92 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.434 Executor task launch worker for task 92 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.434 Executor task launch worker for task 92 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.434 Executor task launch worker for task 92 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.434 Executor task launch worker for task 92 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.435 Executor task launch worker for task 92 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.436 Executor task launch worker for task 92 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.436 Executor task launch worker for task 92 DEBUG TaskMemoryManager: Task 92 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@5e48e75d 20/07/01 11:33:16.437 Executor task launch worker for task 92 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.437 Executor task launch worker for task 92 DEBUG TaskMemoryManager: Task 92 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@5e48e75d 20/07/01 11:33:16.438 Executor task launch worker for task 92 INFO Executor: Finished task 82.0 in stage 2.0 (TID 92). 3346 bytes result sent to driver 20/07/01 11:33:16.438 Executor task launch worker for task 92 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.442 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 93 20/07/01 11:33:16.443 Executor task launch worker for task 93 INFO Executor: Running task 83.0 in stage 2.0 (TID 93) 20/07/01 11:33:16.443 Executor task launch worker for task 93 DEBUG Executor: Task 93's epoch is 1 20/07/01 11:33:16.443 Executor task launch worker for task 93 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.445 Executor task launch worker for task 93 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.445 Executor task launch worker for task 93 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 83-84 20/07/01 11:33:16.445 Executor task launch worker for task 93 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.445 Executor task launch worker for task 93 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.445 Executor task launch worker for task 93 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.445 Executor task launch worker for task 93 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.445 Executor task launch worker for task 93 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.446 Executor task launch worker for task 93 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.447 Executor task launch worker for task 93 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.447 Executor task launch worker for task 93 DEBUG TaskMemoryManager: Task 93 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@c92980d 20/07/01 11:33:16.448 Executor task launch worker for task 93 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.448 Executor task launch worker for task 93 DEBUG TaskMemoryManager: Task 93 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@c92980d 20/07/01 11:33:16.449 Executor task launch worker for task 93 INFO Executor: Finished task 83.0 in stage 2.0 (TID 93). 3346 bytes result sent to driver 20/07/01 11:33:16.449 Executor task launch worker for task 93 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.452 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 94 20/07/01 11:33:16.453 Executor task launch worker for task 94 INFO Executor: Running task 84.0 in stage 2.0 (TID 94) 20/07/01 11:33:16.453 Executor task launch worker for task 94 DEBUG Executor: Task 94's epoch is 1 20/07/01 11:33:16.453 Executor task launch worker for task 94 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.455 Executor task launch worker for task 94 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.455 Executor task launch worker for task 94 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 84-85 20/07/01 11:33:16.456 Executor task launch worker for task 94 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.456 Executor task launch worker for task 94 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.456 Executor task launch worker for task 94 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.456 Executor task launch worker for task 94 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.456 Executor task launch worker for task 94 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.457 Executor task launch worker for task 94 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.458 Executor task launch worker for task 94 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.458 Executor task launch worker for task 94 DEBUG TaskMemoryManager: Task 94 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@1537752d 20/07/01 11:33:16.459 Executor task launch worker for task 94 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.459 Executor task launch worker for task 94 DEBUG TaskMemoryManager: Task 94 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@1537752d 20/07/01 11:33:16.459 Executor task launch worker for task 94 INFO Executor: Finished task 84.0 in stage 2.0 (TID 94). 3346 bytes result sent to driver 20/07/01 11:33:16.460 Executor task launch worker for task 94 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.463 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 95 20/07/01 11:33:16.463 Executor task launch worker for task 95 INFO Executor: Running task 85.0 in stage 2.0 (TID 95) 20/07/01 11:33:16.463 Executor task launch worker for task 95 DEBUG Executor: Task 95's epoch is 1 20/07/01 11:33:16.463 Executor task launch worker for task 95 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.465 Executor task launch worker for task 95 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.465 Executor task launch worker for task 95 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 85-86 20/07/01 11:33:16.465 Executor task launch worker for task 95 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.465 Executor task launch worker for task 95 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.465 Executor task launch worker for task 95 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.465 Executor task launch worker for task 95 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.465 Executor task launch worker for task 95 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.466 Executor task launch worker for task 95 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.467 Executor task launch worker for task 95 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.468 Executor task launch worker for task 95 DEBUG TaskMemoryManager: Task 95 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@6596a007 20/07/01 11:33:16.468 Executor task launch worker for task 95 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.468 Executor task launch worker for task 95 DEBUG TaskMemoryManager: Task 95 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@6596a007 20/07/01 11:33:16.469 Executor task launch worker for task 95 INFO Executor: Finished task 85.0 in stage 2.0 (TID 95). 3346 bytes result sent to driver 20/07/01 11:33:16.469 Executor task launch worker for task 95 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.472 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 96 20/07/01 11:33:16.472 Executor task launch worker for task 96 INFO Executor: Running task 86.0 in stage 2.0 (TID 96) 20/07/01 11:33:16.472 Executor task launch worker for task 96 DEBUG Executor: Task 96's epoch is 1 20/07/01 11:33:16.472 Executor task launch worker for task 96 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.474 Executor task launch worker for task 96 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.474 Executor task launch worker for task 96 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 86-87 20/07/01 11:33:16.474 Executor task launch worker for task 96 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.474 Executor task launch worker for task 96 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.474 Executor task launch worker for task 96 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.475 Executor task launch worker for task 96 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.475 Executor task launch worker for task 96 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.476 Executor task launch worker for task 96 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.476 Executor task launch worker for task 96 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.477 Executor task launch worker for task 96 DEBUG TaskMemoryManager: Task 96 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@28bda28b 20/07/01 11:33:16.477 Executor task launch worker for task 96 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.478 Executor task launch worker for task 96 DEBUG TaskMemoryManager: Task 96 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@28bda28b 20/07/01 11:33:16.478 Executor task launch worker for task 96 INFO Executor: Finished task 86.0 in stage 2.0 (TID 96). 3346 bytes result sent to driver 20/07/01 11:33:16.478 Executor task launch worker for task 96 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.481 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 97 20/07/01 11:33:16.481 Executor task launch worker for task 97 INFO Executor: Running task 87.0 in stage 2.0 (TID 97) 20/07/01 11:33:16.482 Executor task launch worker for task 97 DEBUG Executor: Task 97's epoch is 1 20/07/01 11:33:16.482 Executor task launch worker for task 97 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.484 Executor task launch worker for task 97 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.484 Executor task launch worker for task 97 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 87-88 20/07/01 11:33:16.484 Executor task launch worker for task 97 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.484 Executor task launch worker for task 97 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.484 Executor task launch worker for task 97 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.484 Executor task launch worker for task 97 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.484 Executor task launch worker for task 97 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.486 Executor task launch worker for task 97 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.488 Executor task launch worker for task 97 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.488 Executor task launch worker for task 97 DEBUG TaskMemoryManager: Task 97 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@6e9642ab 20/07/01 11:33:16.489 Executor task launch worker for task 97 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.489 Executor task launch worker for task 97 DEBUG TaskMemoryManager: Task 97 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@6e9642ab 20/07/01 11:33:16.490 Executor task launch worker for task 97 INFO Executor: Finished task 87.0 in stage 2.0 (TID 97). 3346 bytes result sent to driver 20/07/01 11:33:16.490 Executor task launch worker for task 97 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.493 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 98 20/07/01 11:33:16.493 Executor task launch worker for task 98 INFO Executor: Running task 88.0 in stage 2.0 (TID 98) 20/07/01 11:33:16.494 Executor task launch worker for task 98 DEBUG Executor: Task 98's epoch is 1 20/07/01 11:33:16.494 Executor task launch worker for task 98 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.496 Executor task launch worker for task 98 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.496 Executor task launch worker for task 98 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 88-89 20/07/01 11:33:16.496 Executor task launch worker for task 98 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.496 Executor task launch worker for task 98 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.496 Executor task launch worker for task 98 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.496 Executor task launch worker for task 98 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.496 Executor task launch worker for task 98 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.497 Executor task launch worker for task 98 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.498 Executor task launch worker for task 98 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.498 Executor task launch worker for task 98 DEBUG TaskMemoryManager: Task 98 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@1bb6ae0d 20/07/01 11:33:16.499 Executor task launch worker for task 98 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.499 Executor task launch worker for task 98 DEBUG TaskMemoryManager: Task 98 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@1bb6ae0d 20/07/01 11:33:16.500 Executor task launch worker for task 98 INFO Executor: Finished task 88.0 in stage 2.0 (TID 98). 3346 bytes result sent to driver 20/07/01 11:33:16.500 Executor task launch worker for task 98 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.504 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 99 20/07/01 11:33:16.504 Executor task launch worker for task 99 INFO Executor: Running task 90.0 in stage 2.0 (TID 99) 20/07/01 11:33:16.504 Executor task launch worker for task 99 DEBUG Executor: Task 99's epoch is 1 20/07/01 11:33:16.504 Executor task launch worker for task 99 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.506 Executor task launch worker for task 99 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.506 Executor task launch worker for task 99 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 90-91 20/07/01 11:33:16.506 Executor task launch worker for task 99 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.507 Executor task launch worker for task 99 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.507 Executor task launch worker for task 99 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.507 Executor task launch worker for task 99 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.507 Executor task launch worker for task 99 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.508 Executor task launch worker for task 99 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.509 Executor task launch worker for task 99 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.509 Executor task launch worker for task 99 DEBUG TaskMemoryManager: Task 99 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@2dc98138 20/07/01 11:33:16.509 Executor task launch worker for task 99 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.510 Executor task launch worker for task 99 DEBUG TaskMemoryManager: Task 99 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@2dc98138 20/07/01 11:33:16.510 Executor task launch worker for task 99 INFO Executor: Finished task 90.0 in stage 2.0 (TID 99). 3346 bytes result sent to driver 20/07/01 11:33:16.510 Executor task launch worker for task 99 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.514 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 100 20/07/01 11:33:16.514 Executor task launch worker for task 100 INFO Executor: Running task 91.0 in stage 2.0 (TID 100) 20/07/01 11:33:16.514 Executor task launch worker for task 100 DEBUG Executor: Task 100's epoch is 1 20/07/01 11:33:16.515 Executor task launch worker for task 100 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.516 Executor task launch worker for task 100 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.517 Executor task launch worker for task 100 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 91-92 20/07/01 11:33:16.517 Executor task launch worker for task 100 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.517 Executor task launch worker for task 100 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.517 Executor task launch worker for task 100 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.517 Executor task launch worker for task 100 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.517 Executor task launch worker for task 100 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.518 Executor task launch worker for task 100 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.519 Executor task launch worker for task 100 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.519 Executor task launch worker for task 100 DEBUG TaskMemoryManager: Task 100 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@6870bbf5 20/07/01 11:33:16.520 Executor task launch worker for task 100 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.520 Executor task launch worker for task 100 DEBUG TaskMemoryManager: Task 100 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@6870bbf5 20/07/01 11:33:16.521 Executor task launch worker for task 100 INFO Executor: Finished task 91.0 in stage 2.0 (TID 100). 3346 bytes result sent to driver 20/07/01 11:33:16.521 Executor task launch worker for task 100 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.524 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 101 20/07/01 11:33:16.524 Executor task launch worker for task 101 INFO Executor: Running task 92.0 in stage 2.0 (TID 101) 20/07/01 11:33:16.525 Executor task launch worker for task 101 DEBUG Executor: Task 101's epoch is 1 20/07/01 11:33:16.525 Executor task launch worker for task 101 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.527 Executor task launch worker for task 101 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.527 Executor task launch worker for task 101 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 92-93 20/07/01 11:33:16.527 Executor task launch worker for task 101 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.527 Executor task launch worker for task 101 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.527 Executor task launch worker for task 101 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.527 Executor task launch worker for task 101 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.527 Executor task launch worker for task 101 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.528 Executor task launch worker for task 101 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.529 Executor task launch worker for task 101 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.529 Executor task launch worker for task 101 DEBUG TaskMemoryManager: Task 101 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@4f6afb8e 20/07/01 11:33:16.530 Executor task launch worker for task 101 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.530 Executor task launch worker for task 101 DEBUG TaskMemoryManager: Task 101 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@4f6afb8e 20/07/01 11:33:16.531 Executor task launch worker for task 101 INFO Executor: Finished task 92.0 in stage 2.0 (TID 101). 3346 bytes result sent to driver 20/07/01 11:33:16.531 Executor task launch worker for task 101 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.534 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 102 20/07/01 11:33:16.535 Executor task launch worker for task 102 INFO Executor: Running task 93.0 in stage 2.0 (TID 102) 20/07/01 11:33:16.535 Executor task launch worker for task 102 DEBUG Executor: Task 102's epoch is 1 20/07/01 11:33:16.535 Executor task launch worker for task 102 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.537 Executor task launch worker for task 102 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.537 Executor task launch worker for task 102 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 93-94 20/07/01 11:33:16.538 Executor task launch worker for task 102 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.538 Executor task launch worker for task 102 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.538 Executor task launch worker for task 102 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.538 Executor task launch worker for task 102 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.538 Executor task launch worker for task 102 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.539 Executor task launch worker for task 102 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.541 Executor task launch worker for task 102 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.541 Executor task launch worker for task 102 DEBUG TaskMemoryManager: Task 102 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@38b2884c 20/07/01 11:33:16.543 Executor task launch worker for task 102 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.543 Executor task launch worker for task 102 DEBUG TaskMemoryManager: Task 102 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@38b2884c 20/07/01 11:33:16.544 Executor task launch worker for task 102 INFO Executor: Finished task 93.0 in stage 2.0 (TID 102). 3346 bytes result sent to driver 20/07/01 11:33:16.545 Executor task launch worker for task 102 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.548 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 103 20/07/01 11:33:16.549 Executor task launch worker for task 103 INFO Executor: Running task 94.0 in stage 2.0 (TID 103) 20/07/01 11:33:16.549 Executor task launch worker for task 103 DEBUG Executor: Task 103's epoch is 1 20/07/01 11:33:16.550 Executor task launch worker for task 103 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.551 Executor task launch worker for task 103 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.551 Executor task launch worker for task 103 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 94-95 20/07/01 11:33:16.552 Executor task launch worker for task 103 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.552 Executor task launch worker for task 103 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.552 Executor task launch worker for task 103 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.552 Executor task launch worker for task 103 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.552 Executor task launch worker for task 103 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.553 Executor task launch worker for task 103 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.554 Executor task launch worker for task 103 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.554 Executor task launch worker for task 103 DEBUG TaskMemoryManager: Task 103 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@a2644d4 20/07/01 11:33:16.555 Executor task launch worker for task 103 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.555 Executor task launch worker for task 103 DEBUG TaskMemoryManager: Task 103 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@a2644d4 20/07/01 11:33:16.556 Executor task launch worker for task 103 INFO Executor: Finished task 94.0 in stage 2.0 (TID 103). 3346 bytes result sent to driver 20/07/01 11:33:16.556 Executor task launch worker for task 103 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.559 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 104 20/07/01 11:33:16.559 Executor task launch worker for task 104 INFO Executor: Running task 95.0 in stage 2.0 (TID 104) 20/07/01 11:33:16.560 Executor task launch worker for task 104 DEBUG Executor: Task 104's epoch is 1 20/07/01 11:33:16.560 Executor task launch worker for task 104 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.561 Executor task launch worker for task 104 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.561 Executor task launch worker for task 104 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 95-96 20/07/01 11:33:16.562 Executor task launch worker for task 104 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.562 Executor task launch worker for task 104 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.562 Executor task launch worker for task 104 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.562 Executor task launch worker for task 104 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.562 Executor task launch worker for task 104 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.563 Executor task launch worker for task 104 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.564 Executor task launch worker for task 104 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.564 Executor task launch worker for task 104 DEBUG TaskMemoryManager: Task 104 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@12a9a25f 20/07/01 11:33:16.565 Executor task launch worker for task 104 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.565 Executor task launch worker for task 104 DEBUG TaskMemoryManager: Task 104 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@12a9a25f 20/07/01 11:33:16.566 Executor task launch worker for task 104 INFO Executor: Finished task 95.0 in stage 2.0 (TID 104). 3346 bytes result sent to driver 20/07/01 11:33:16.566 Executor task launch worker for task 104 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.569 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 105 20/07/01 11:33:16.569 Executor task launch worker for task 105 INFO Executor: Running task 96.0 in stage 2.0 (TID 105) 20/07/01 11:33:16.570 Executor task launch worker for task 105 DEBUG Executor: Task 105's epoch is 1 20/07/01 11:33:16.570 Executor task launch worker for task 105 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.571 Executor task launch worker for task 105 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.571 Executor task launch worker for task 105 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 96-97 20/07/01 11:33:16.572 Executor task launch worker for task 105 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.572 Executor task launch worker for task 105 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.572 Executor task launch worker for task 105 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.572 Executor task launch worker for task 105 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.572 Executor task launch worker for task 105 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.573 Executor task launch worker for task 105 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.574 Executor task launch worker for task 105 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.574 Executor task launch worker for task 105 DEBUG TaskMemoryManager: Task 105 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@48661200 20/07/01 11:33:16.575 Executor task launch worker for task 105 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.575 Executor task launch worker for task 105 DEBUG TaskMemoryManager: Task 105 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@48661200 20/07/01 11:33:16.576 Executor task launch worker for task 105 INFO Executor: Finished task 96.0 in stage 2.0 (TID 105). 3346 bytes result sent to driver 20/07/01 11:33:16.576 Executor task launch worker for task 105 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.579 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 106 20/07/01 11:33:16.580 Executor task launch worker for task 106 INFO Executor: Running task 97.0 in stage 2.0 (TID 106) 20/07/01 11:33:16.580 Executor task launch worker for task 106 DEBUG Executor: Task 106's epoch is 1 20/07/01 11:33:16.580 Executor task launch worker for task 106 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.582 Executor task launch worker for task 106 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.582 Executor task launch worker for task 106 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 97-98 20/07/01 11:33:16.582 Executor task launch worker for task 106 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.582 Executor task launch worker for task 106 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.582 Executor task launch worker for task 106 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.582 Executor task launch worker for task 106 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.582 Executor task launch worker for task 106 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.584 Executor task launch worker for task 106 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.592 Executor task launch worker for task 106 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.593 Executor task launch worker for task 106 DEBUG TaskMemoryManager: Task 106 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@17a7599d 20/07/01 11:33:16.594 Executor task launch worker for task 106 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.594 Executor task launch worker for task 106 DEBUG TaskMemoryManager: Task 106 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@17a7599d 20/07/01 11:33:16.595 Executor task launch worker for task 106 INFO Executor: Finished task 97.0 in stage 2.0 (TID 106). 3389 bytes result sent to driver 20/07/01 11:33:16.595 Executor task launch worker for task 106 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.599 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 107 20/07/01 11:33:16.599 Executor task launch worker for task 107 INFO Executor: Running task 98.0 in stage 2.0 (TID 107) 20/07/01 11:33:16.600 Executor task launch worker for task 107 DEBUG Executor: Task 107's epoch is 1 20/07/01 11:33:16.600 Executor task launch worker for task 107 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.602 Executor task launch worker for task 107 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.602 Executor task launch worker for task 107 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 98-99 20/07/01 11:33:16.602 Executor task launch worker for task 107 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.603 Executor task launch worker for task 107 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.603 Executor task launch worker for task 107 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.603 Executor task launch worker for task 107 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.603 Executor task launch worker for task 107 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.604 Executor task launch worker for task 107 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.606 Executor task launch worker for task 107 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.606 Executor task launch worker for task 107 DEBUG TaskMemoryManager: Task 107 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@795528dd 20/07/01 11:33:16.607 Executor task launch worker for task 107 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.607 Executor task launch worker for task 107 DEBUG TaskMemoryManager: Task 107 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@795528dd 20/07/01 11:33:16.608 Executor task launch worker for task 107 INFO Executor: Finished task 98.0 in stage 2.0 (TID 107). 3346 bytes result sent to driver 20/07/01 11:33:16.608 Executor task launch worker for task 107 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.612 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 108 20/07/01 11:33:16.612 Executor task launch worker for task 108 INFO Executor: Running task 99.0 in stage 2.0 (TID 108) 20/07/01 11:33:16.612 Executor task launch worker for task 108 DEBUG Executor: Task 108's epoch is 1 20/07/01 11:33:16.613 Executor task launch worker for task 108 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.615 Executor task launch worker for task 108 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.615 Executor task launch worker for task 108 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 99-100 20/07/01 11:33:16.616 Executor task launch worker for task 108 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.616 Executor task launch worker for task 108 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.616 Executor task launch worker for task 108 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.616 Executor task launch worker for task 108 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.616 Executor task launch worker for task 108 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.617 Executor task launch worker for task 108 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.619 Executor task launch worker for task 108 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.619 Executor task launch worker for task 108 DEBUG TaskMemoryManager: Task 108 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@2c0b59c9 20/07/01 11:33:16.620 Executor task launch worker for task 108 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.620 Executor task launch worker for task 108 DEBUG TaskMemoryManager: Task 108 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@2c0b59c9 20/07/01 11:33:16.621 Executor task launch worker for task 108 INFO Executor: Finished task 99.0 in stage 2.0 (TID 108). 3346 bytes result sent to driver 20/07/01 11:33:16.622 Executor task launch worker for task 108 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.625 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 109 20/07/01 11:33:16.625 Executor task launch worker for task 109 INFO Executor: Running task 100.0 in stage 2.0 (TID 109) 20/07/01 11:33:16.625 Executor task launch worker for task 109 DEBUG Executor: Task 109's epoch is 1 20/07/01 11:33:16.625 Executor task launch worker for task 109 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.627 Executor task launch worker for task 109 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.627 Executor task launch worker for task 109 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 100-101 20/07/01 11:33:16.627 Executor task launch worker for task 109 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.628 Executor task launch worker for task 109 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.628 Executor task launch worker for task 109 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.628 Executor task launch worker for task 109 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.628 Executor task launch worker for task 109 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.629 Executor task launch worker for task 109 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.629 Executor task launch worker for task 109 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.630 Executor task launch worker for task 109 DEBUG TaskMemoryManager: Task 109 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@c85252d 20/07/01 11:33:16.630 Executor task launch worker for task 109 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.630 Executor task launch worker for task 109 DEBUG TaskMemoryManager: Task 109 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@c85252d 20/07/01 11:33:16.633 Executor task launch worker for task 109 INFO Executor: Finished task 100.0 in stage 2.0 (TID 109). 3346 bytes result sent to driver 20/07/01 11:33:16.633 Executor task launch worker for task 109 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.638 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 110 20/07/01 11:33:16.640 Executor task launch worker for task 110 INFO Executor: Running task 101.0 in stage 2.0 (TID 110) 20/07/01 11:33:16.641 Executor task launch worker for task 110 DEBUG Executor: Task 110's epoch is 1 20/07/01 11:33:16.641 Executor task launch worker for task 110 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.643 Executor task launch worker for task 110 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.643 Executor task launch worker for task 110 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 101-102 20/07/01 11:33:16.643 Executor task launch worker for task 110 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.644 Executor task launch worker for task 110 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.644 Executor task launch worker for task 110 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.644 Executor task launch worker for task 110 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.644 Executor task launch worker for task 110 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.645 Executor task launch worker for task 110 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.646 Executor task launch worker for task 110 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.646 Executor task launch worker for task 110 DEBUG TaskMemoryManager: Task 110 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@108b1922 20/07/01 11:33:16.647 Executor task launch worker for task 110 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.647 Executor task launch worker for task 110 DEBUG TaskMemoryManager: Task 110 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@108b1922 20/07/01 11:33:16.647 Executor task launch worker for task 110 INFO Executor: Finished task 101.0 in stage 2.0 (TID 110). 3346 bytes result sent to driver 20/07/01 11:33:16.648 Executor task launch worker for task 110 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.652 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 111 20/07/01 11:33:16.652 Executor task launch worker for task 111 INFO Executor: Running task 104.0 in stage 2.0 (TID 111) 20/07/01 11:33:16.653 Executor task launch worker for task 111 DEBUG Executor: Task 111's epoch is 1 20/07/01 11:33:16.653 Executor task launch worker for task 111 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.655 Executor task launch worker for task 111 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.655 Executor task launch worker for task 111 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 104-105 20/07/01 11:33:16.655 Executor task launch worker for task 111 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.655 Executor task launch worker for task 111 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.655 Executor task launch worker for task 111 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.655 Executor task launch worker for task 111 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.655 Executor task launch worker for task 111 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.656 Executor task launch worker for task 111 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.657 Executor task launch worker for task 111 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.657 Executor task launch worker for task 111 DEBUG TaskMemoryManager: Task 111 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@6fbc4dcb 20/07/01 11:33:16.658 Executor task launch worker for task 111 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.658 Executor task launch worker for task 111 DEBUG TaskMemoryManager: Task 111 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@6fbc4dcb 20/07/01 11:33:16.659 Executor task launch worker for task 111 INFO Executor: Finished task 104.0 in stage 2.0 (TID 111). 3346 bytes result sent to driver 20/07/01 11:33:16.659 Executor task launch worker for task 111 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.663 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 112 20/07/01 11:33:16.663 Executor task launch worker for task 112 INFO Executor: Running task 105.0 in stage 2.0 (TID 112) 20/07/01 11:33:16.663 Executor task launch worker for task 112 DEBUG Executor: Task 112's epoch is 1 20/07/01 11:33:16.663 Executor task launch worker for task 112 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.665 Executor task launch worker for task 112 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.665 Executor task launch worker for task 112 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 105-106 20/07/01 11:33:16.665 Executor task launch worker for task 112 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.666 Executor task launch worker for task 112 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.666 Executor task launch worker for task 112 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.666 Executor task launch worker for task 112 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.666 Executor task launch worker for task 112 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.667 Executor task launch worker for task 112 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.668 Executor task launch worker for task 112 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.668 Executor task launch worker for task 112 DEBUG TaskMemoryManager: Task 112 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@1b0fd1c1 20/07/01 11:33:16.668 Executor task launch worker for task 112 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.669 Executor task launch worker for task 112 DEBUG TaskMemoryManager: Task 112 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@1b0fd1c1 20/07/01 11:33:16.669 Executor task launch worker for task 112 INFO Executor: Finished task 105.0 in stage 2.0 (TID 112). 3346 bytes result sent to driver 20/07/01 11:33:16.669 Executor task launch worker for task 112 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.673 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 113 20/07/01 11:33:16.673 Executor task launch worker for task 113 INFO Executor: Running task 106.0 in stage 2.0 (TID 113) 20/07/01 11:33:16.674 Executor task launch worker for task 113 DEBUG Executor: Task 113's epoch is 1 20/07/01 11:33:16.674 Executor task launch worker for task 113 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.675 Executor task launch worker for task 113 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.675 Executor task launch worker for task 113 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 106-107 20/07/01 11:33:16.675 Executor task launch worker for task 113 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.676 Executor task launch worker for task 113 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.676 Executor task launch worker for task 113 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.676 Executor task launch worker for task 113 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.676 Executor task launch worker for task 113 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.676 Executor task launch worker for task 113 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.677 Executor task launch worker for task 113 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.677 Executor task launch worker for task 113 DEBUG TaskMemoryManager: Task 113 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@4f1b7043 20/07/01 11:33:16.678 Executor task launch worker for task 113 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.678 Executor task launch worker for task 113 DEBUG TaskMemoryManager: Task 113 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@4f1b7043 20/07/01 11:33:16.679 Executor task launch worker for task 113 INFO Executor: Finished task 106.0 in stage 2.0 (TID 113). 3346 bytes result sent to driver 20/07/01 11:33:16.679 Executor task launch worker for task 113 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.682 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 114 20/07/01 11:33:16.682 Executor task launch worker for task 114 INFO Executor: Running task 108.0 in stage 2.0 (TID 114) 20/07/01 11:33:16.683 Executor task launch worker for task 114 DEBUG Executor: Task 114's epoch is 1 20/07/01 11:33:16.683 Executor task launch worker for task 114 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.686 Executor task launch worker for task 114 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.686 Executor task launch worker for task 114 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 108-109 20/07/01 11:33:16.686 Executor task launch worker for task 114 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.686 Executor task launch worker for task 114 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.686 Executor task launch worker for task 114 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.686 Executor task launch worker for task 114 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.686 Executor task launch worker for task 114 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.688 Executor task launch worker for task 114 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.689 Executor task launch worker for task 114 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.689 Executor task launch worker for task 114 DEBUG TaskMemoryManager: Task 114 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@4c0d184c 20/07/01 11:33:16.690 Executor task launch worker for task 114 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.690 Executor task launch worker for task 114 DEBUG TaskMemoryManager: Task 114 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@4c0d184c 20/07/01 11:33:16.690 Executor task launch worker for task 114 INFO Executor: Finished task 108.0 in stage 2.0 (TID 114). 3346 bytes result sent to driver 20/07/01 11:33:16.690 Executor task launch worker for task 114 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.694 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 115 20/07/01 11:33:16.694 Executor task launch worker for task 115 INFO Executor: Running task 109.0 in stage 2.0 (TID 115) 20/07/01 11:33:16.695 Executor task launch worker for task 115 DEBUG Executor: Task 115's epoch is 1 20/07/01 11:33:16.695 Executor task launch worker for task 115 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.697 Executor task launch worker for task 115 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.697 Executor task launch worker for task 115 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 109-110 20/07/01 11:33:16.698 Executor task launch worker for task 115 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.698 Executor task launch worker for task 115 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.698 Executor task launch worker for task 115 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.698 Executor task launch worker for task 115 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.698 Executor task launch worker for task 115 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.699 Executor task launch worker for task 115 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.701 Executor task launch worker for task 115 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.701 Executor task launch worker for task 115 DEBUG TaskMemoryManager: Task 115 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@27fbed20 20/07/01 11:33:16.702 Executor task launch worker for task 115 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.702 Executor task launch worker for task 115 DEBUG TaskMemoryManager: Task 115 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@27fbed20 20/07/01 11:33:16.703 Executor task launch worker for task 115 INFO Executor: Finished task 109.0 in stage 2.0 (TID 115). 3346 bytes result sent to driver 20/07/01 11:33:16.703 Executor task launch worker for task 115 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.707 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 116 20/07/01 11:33:16.707 Executor task launch worker for task 116 INFO Executor: Running task 110.0 in stage 2.0 (TID 116) 20/07/01 11:33:16.707 Executor task launch worker for task 116 DEBUG Executor: Task 116's epoch is 1 20/07/01 11:33:16.707 Executor task launch worker for task 116 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.709 Executor task launch worker for task 116 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.709 Executor task launch worker for task 116 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 110-111 20/07/01 11:33:16.709 Executor task launch worker for task 116 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.709 Executor task launch worker for task 116 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.709 Executor task launch worker for task 116 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.709 Executor task launch worker for task 116 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.709 Executor task launch worker for task 116 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.710 Executor task launch worker for task 116 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.711 Executor task launch worker for task 116 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.712 Executor task launch worker for task 116 DEBUG TaskMemoryManager: Task 116 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@49c11ef9 20/07/01 11:33:16.712 Executor task launch worker for task 116 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.712 Executor task launch worker for task 116 DEBUG TaskMemoryManager: Task 116 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@49c11ef9 20/07/01 11:33:16.713 Executor task launch worker for task 116 INFO Executor: Finished task 110.0 in stage 2.0 (TID 116). 3346 bytes result sent to driver 20/07/01 11:33:16.713 Executor task launch worker for task 116 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.716 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 117 20/07/01 11:33:16.716 Executor task launch worker for task 117 INFO Executor: Running task 111.0 in stage 2.0 (TID 117) 20/07/01 11:33:16.716 Executor task launch worker for task 117 DEBUG Executor: Task 117's epoch is 1 20/07/01 11:33:16.716 Executor task launch worker for task 117 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.718 Executor task launch worker for task 117 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.718 Executor task launch worker for task 117 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 111-112 20/07/01 11:33:16.718 Executor task launch worker for task 117 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.718 Executor task launch worker for task 117 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.718 Executor task launch worker for task 117 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.718 Executor task launch worker for task 117 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.718 Executor task launch worker for task 117 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.719 Executor task launch worker for task 117 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.720 Executor task launch worker for task 117 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.720 Executor task launch worker for task 117 DEBUG TaskMemoryManager: Task 117 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@4d14937c 20/07/01 11:33:16.721 Executor task launch worker for task 117 DEBUG GenerateUnsafeProjection: code for input[0, bigint, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 030 */ /* 031 */ boolean isNull_0 = i.isNullAt(0); /* 032 */ long value_0 = isNull_0 ? /* 033 */ -1L : (i.getLong(0)); /* 034 */ if (isNull_0) { /* 035 */ mutableStateArray_0[0].setNullAt(0); /* 036 */ } else { /* 037 */ mutableStateArray_0[0].write(0, value_0); /* 038 */ } /* 039 */ return (mutableStateArray_0[0].getRow()); /* 040 */ } /* 041 */ /* 042 */ /* 043 */ } 20/07/01 11:33:16.721 Executor task launch worker for task 117 DEBUG TaskMemoryManager: Task 117 release 256.0 KiB from org.apache.spark.unsafe.map.BytesToBytesMap@4d14937c 20/07/01 11:33:16.722 Executor task launch worker for task 117 INFO Executor: Finished task 111.0 in stage 2.0 (TID 117). 3346 bytes result sent to driver 20/07/01 11:33:16.722 Executor task launch worker for task 117 DEBUG ExecutorMetricsPoller: removing (2, 0) from stageTCMP 20/07/01 11:33:16.726 dispatcher-Executor INFO CoarseGrainedExecutorBackend: Got assigned task 118 20/07/01 11:33:16.726 Executor task launch worker for task 118 INFO Executor: Running task 112.0 in stage 2.0 (TID 118) 20/07/01 11:33:16.727 Executor task launch worker for task 118 DEBUG Executor: Task 118's epoch is 1 20/07/01 11:33:16.727 Executor task launch worker for task 118 DEBUG ExecutorMetricsPoller: stageTCMP: (2, 0) -> 1 20/07/01 11:33:16.729 Executor task launch worker for task 118 DEBUG MapOutputTrackerWorker: Fetching outputs for shuffle 0 20/07/01 11:33:16.729 Executor task launch worker for task 118 DEBUG MapOutputTrackerWorker: Convert map statuses for shuffle 0, mappers 0-2, partitions 112-113 20/07/01 11:33:16.729 Executor task launch worker for task 118 DEBUG ShuffleBlockFetcherIterator: maxBytesInFlight: 50331648, targetRemoteRequestSize: 10066329, maxBlocksInFlightPerAddress: 2147483647 20/07/01 11:33:16.729 Executor task launch worker for task 118 INFO ShuffleBlockFetcherIterator: Getting 0 (0.0 B) non-empty blocks including 0 (0.0 B) local and 0 (0.0 B) host-local and 0 (0.0 B) remote blocks 20/07/01 11:33:16.729 Executor task launch worker for task 118 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 0 ms 20/07/01 11:33:16.729 Executor task launch worker for task 118 DEBUG ShuffleBlockFetcherIterator: Start fetching local blocks: 20/07/01 11:33:16.729 Executor task launch worker for task 118 DEBUG ShuffleBlockFetcherIterator: Got local blocks in 0 ms 20/07/01 11:33:16.730 Executor task launch worker for task 118 DEBUG GenerateUnsafeProjection: code for 0: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ /* 010 */ public SpecificUnsafeProjection(Object[] references) { /* 011 */ this.references = references; /* 012 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0); /* 013 */ /* 014 */ } /* 015 */ /* 016 */ public void initialize(int partitionIndex) { /* 017 */ /* 018 */ } /* 019 */ /* 020 */ // Scala.Function1 need this /* 021 */ public java.lang.Object apply(java.lang.Object row) { /* 022 */ return apply((InternalRow) row); /* 023 */ } /* 024 */ /* 025 */ public UnsafeRow apply(InternalRow i) { /* 026 */ mutableStateArray_0[0].reset(); /* 027 */ /* 028 */ /* 029 */ /* 030 */ /* 031 */ /* 032 */ mutableStateArray_0[0].write(0, 0L); /* 033 */ return (mutableStateArray_0[0].getRow()); /* 034 */ } /* 035 */ /* 036 */ /* 037 */ } 20/07/01 11:33:16.731 Executor task launch worker for task 118 DEBUG GenerateUnsafeProjection: code for input[0, array, true]: /* 001 */ public java.lang.Object generate(Object[] references) { /* 002 */ return new SpecificUnsafeProjection(references); /* 003 */ } /* 004 */ /* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection { /* 006 */ /* 007 */ private Object[] references; /* 008 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] mutableStateArray_0 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[1]; /* 009 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[] mutableStateArray_1 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter[1]; /* 010 */ /* 011 */ public SpecificUnsafeProjection(Object[] references) { /* 012 */ this.references = references; /* 013 */ mutableStateArray_0[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 32); /* 014 */ mutableStateArray_1[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter(mutableStateArray_0[0], 4); /* 015 */ /* 016 */ } /* 017 */ /* 018 */ public void initialize(int partitionIndex) { /* 019 */ /* 020 */ } /* 021 */ /* 022 */ // Scala.Function1 need this /* 023 */ public java.lang.Object apply(java.lang.Object row) { /* 024 */ return apply((InternalRow) row); /* 025 */ } /* 026 */ /* 027 */ public UnsafeRow apply(InternalRow i) { /* 028 */ mutableStateArray_0[0].reset(); /* 029 */ /* 030 */ /* 031 */ mutableStateArray_0[0].zeroOutNullBytes(); /* 032 */ /* 033 */ boolean isNull_0 = i.isNullAt(0); /* 034 */ ArrayData value_0 = isNull_0 ? /* 035 */ null : (i.getArray(0)); /* 036 */ if (isNull_0) { /* 037 */ mutableStateArray_0[0].setNullAt(0); /* 038 */ } else { /* 039 */ // Remember the current cursor so that we can calculate how many bytes are /* 040 */ // written later. /* 041 */ final int previousCursor_0 = mutableStateArray_0[0].cursor(); /* 042 */ /* 043 */ final ArrayData tmpInput_0 = value_0; /* 044 */ if (tmpInput_0 instanceof UnsafeArrayData) { /* 045 */ mutableStateArray_0[0].write((UnsafeArrayData) tmpInput_0); /* 046 */ } else { /* 047 */ final int numElements_0 = tmpInput_0.numElements(); /* 048 */ mutableStateArray_1[0].initialize(numElements_0); /* 049 */ /* 050 */ for (int index_0 = 0; index_0 < numElements_0; index_0++) { /* 051 */ /* 052 */ if (tmpInput_0.isNullAt(index_0)) { /* 053 */ mutableStateArray_1[0].setNull4Bytes(index_0); /* 054 */ } else { /* 055 */ mutableStateArray_1[0].write(index_0, tmpInput_0.getInt(index_0)); /* 056 */ } /* 057 */ /* 058 */ } /* 059 */ } /* 060 */ /* 061 */ mutableStateArray_0[0].setOffsetAndSizeFromPreviousCursor(0, previousCursor_0); /* 062 */ } /* 063 */ return (mutableStateArray_0[0].getRow()); /* 064 */ } /* 065 */ /* 066 */ /* 067 */ } 20/07/01 11:33:16.732 Executor task launch worker for task 118 DEBUG TaskMemoryManager: Task 118 acquired 256.0 KiB for org.apache.spark.unsafe.map.BytesToBytesMap@4c457292 20/07/01 11:33:16.733 Executor task launch worker for task 118 DEBUG GenerateUnsafeProjection: code for input[0, bigi