
Skipping 18,516 KB..
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.sql.hive.client.Shim_v0_13.getPartitionsByFilter(HiveShim.scala:721)
	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getPartitionsByFilter$1.apply(HiveClientImpl.scala:650)
	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getPartitionsByFilter$1.apply(HiveClientImpl.scala:648)
	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:273)
	at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:211)
	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:210)
	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:256)
	at org.apache.spark.sql.hive.client.HiveClientImpl.getPartitionsByFilter(HiveClientImpl.scala:648)
	at org.apache.spark.sql.hive.client.HiveClientSuite$$anonfun$15.apply(HiveClientSuite.scala:75)
	at org.apache.spark.sql.hive.client.HiveClientSuite$$anonfun$15.apply(HiveClientSuite.scala:73)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:68)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite$class.run(Suite.scala:1147)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:31)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:31)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1210)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1257)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1255)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1255)
	at org.apache.spark.sql.hive.client.HiveClientSuites.runNestedSuites(HiveClientSuites.scala:24)
	at org.scalatest.Suite$class.run(Suite.scala:1144)
	at org.apache.spark.sql.hive.client.HiveClientSuites.run(HiveClientSuites.scala:24)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:480)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: MetaException(message:Filtering is supported only on partition keys of type string)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree$FilterBuilder.setError(ExpressionTree.java:185)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree$LeafNode.getJdoFilterPushdownParam(ExpressionTree.java:440)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree$LeafNode.generateJDOFilterOverPartitions(ExpressionTree.java:357)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree$LeafNode.generateJDOFilter(ExpressionTree.java:279)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree.generateJDOFilterFragment(ExpressionTree.java:578)
	at org.apache.hadoop.hive.metastore.ObjectStore.makeQueryFilterString(ObjectStore.java:2615)
	at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsViaOrmFilter(ObjectStore.java:2199)
	at org.apache.hadoop.hive.metastore.ObjectStore.access$500(ObjectStore.java:160)
	at org.apache.hadoop.hive.metastore.ObjectStore$5.getJdoResult(ObjectStore.java:2530)
	at org.apache.hadoop.hive.metastore.ObjectStore$5.getJdoResult(ObjectStore.java:2515)
	at org.apache.hadoop.hive.metastore.ObjectStore$GetHelper.run(ObjectStore.java:2391)
	at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByFilterInternal(ObjectStore.java:2532)
	at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByFilter(ObjectStore.java:2335)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:114)
	at com.sun.proxy.$Proxy165.getPartitionsByFilter(Unknown Source)
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_partitions_by_filter(HiveMetaStore.java:4448)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
	at com.sun.proxy.$Proxy167.get_partitions_by_filter(Unknown Source)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.listPartitionsByFilter(HiveMetaStoreClient.java:1105)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156)
	at com.sun.proxy.$Proxy168.listPartitionsByFilter(Unknown Source)
	at org.apache.hadoop.hive.ql.metadata.Hive.getPartitionsByFilter(Hive.java:2254)
	... 63 more
[info] - 1.2: getPartitionsByFilter returns all partitions when hive.metastore.try.direct.sql=false (35 seconds, 413 milliseconds)
[info] - 1.2: getPartitionsByFilter: ds=20170101 (353 milliseconds)
[info] - 1.2: getPartitionsByFilter: ds=(20170101 + 1) and h=0 (165 milliseconds)
[info] - 1.2: getPartitionsByFilter: chunk='aa' (186 milliseconds)
[info] - 1.2: getPartitionsByFilter: 20170101=ds (128 milliseconds)
[info] - 1.2: getPartitionsByFilter: ds=20170101 and h=10 (172 milliseconds)
[info] - 1.2: getPartitionsByFilter: ds=20170101 or ds=20170102 (224 milliseconds)
[info] - 1.2: getPartitionsByFilter: ds in (20170102, 20170103) (using IN expression) (200 milliseconds)
[info] - 1.2: getPartitionsByFilter: ds in (20170102, 20170103) (using INSET expression) (89 milliseconds)
[info] - 1.2: getPartitionsByFilter: chunk in ('ab', 'ba') (using IN expression) (161 milliseconds)
[info] - 1.2: getPartitionsByFilter: chunk in ('ab', 'ba') (using INSET expression) (59 milliseconds)
[info] - 1.2: getPartitionsByFilter: (ds=20170101 and h>=8) or (ds=20170102 and h<8) (177 milliseconds)
[info] - 1.2: getPartitionsByFilter: (ds=20170101 and h>=8) or (ds=20170102 and h<(7+1)) (152 milliseconds)
[info] - 1.2: getPartitionsByFilter: chunk in ('ab', 'ba') and ((ds=20170101 and h>=8) or (ds=20170102 and h<8)) (209 milliseconds)
[info] HiveClientSuite(2.0):
20:23:54.102 WARN org.apache.hadoop.hive.metastore.ObjectStore: Version information not found in metastore. hive.metastore.schema.verification is not enabled so recording the schema version 2.0.0
20:23:54.280 WARN org.apache.hadoop.hive.metastore.ObjectStore: Failed to get database default, returning NoSuchObjectException
20:23:54.848 WARN org.apache.hadoop.hive.ql.session.SessionState: METASTORE_FILTER_HOOK will be ignored, since hive.security.authorization.manager is set to instance of HiveAuthorizerFactory.
20:24:26.786 WARN org.apache.hadoop.hive.metastore.ObjectStore: Version information not found in metastore. hive.metastore.schema.verification is not enabled so recording the schema version 2.0.0
20:24:27.837 WARN org.apache.hadoop.hive.ql.session.SessionState: METASTORE_FILTER_HOOK will be ignored, since hive.security.authorization.manager is set to instance of HiveAuthorizerFactory.
20:24:56.737 WARN org.apache.spark.sql.hive.client.Shim_v2_0: Caught Hive MetaException attempting to get partition metadata by filter from Hive. Falling back to fetching all partition metadata, which will degrade performance. Modifying your Hive metastore configuration to set hive.metastore.try.direct.sql to true may resolve this problem.
java.lang.reflect.InvocationTargetException
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.sql.hive.client.Shim_v0_13.getPartitionsByFilter(HiveShim.scala:721)
	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getPartitionsByFilter$1.apply(HiveClientImpl.scala:650)
	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getPartitionsByFilter$1.apply(HiveClientImpl.scala:648)
	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:273)
	at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:211)
	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:210)
	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:256)
	at org.apache.spark.sql.hive.client.HiveClientImpl.getPartitionsByFilter(HiveClientImpl.scala:648)
	at org.apache.spark.sql.hive.client.HiveClientSuite$$anonfun$15.apply(HiveClientSuite.scala:75)
	at org.apache.spark.sql.hive.client.HiveClientSuite$$anonfun$15.apply(HiveClientSuite.scala:73)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:68)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite$class.run(Suite.scala:1147)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:31)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:31)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1210)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1257)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1255)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1255)
	at org.apache.spark.sql.hive.client.HiveClientSuites.runNestedSuites(HiveClientSuites.scala:24)
	at org.scalatest.Suite$class.run(Suite.scala:1144)
	at org.apache.spark.sql.hive.client.HiveClientSuites.run(HiveClientSuites.scala:24)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:480)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: MetaException(message:Filtering is supported only on partition keys of type string)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree$FilterBuilder.setError(ExpressionTree.java:184)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree$LeafNode.getJdoFilterPushdownParam(ExpressionTree.java:439)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree$LeafNode.generateJDOFilterOverPartitions(ExpressionTree.java:356)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree$LeafNode.generateJDOFilter(ExpressionTree.java:278)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree.generateJDOFilterFragment(ExpressionTree.java:583)
	at org.apache.hadoop.hive.metastore.ObjectStore.makeQueryFilterString(ObjectStore.java:2704)
	at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsViaOrmFilter(ObjectStore.java:2312)
	at org.apache.hadoop.hive.metastore.ObjectStore.access$500(ObjectStore.java:166)
	at org.apache.hadoop.hive.metastore.ObjectStore$5.getJdoResult(ObjectStore.java:2637)
	at org.apache.hadoop.hive.metastore.ObjectStore$5.getJdoResult(ObjectStore.java:2622)
	at org.apache.hadoop.hive.metastore.ObjectStore$GetHelper.run(ObjectStore.java:2485)
	at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByFilterInternal(ObjectStore.java:2639)
	at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByFilter(ObjectStore.java:2428)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:101)
	at com.sun.proxy.$Proxy175.getPartitionsByFilter(Unknown Source)
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_partitions_by_filter(HiveMetaStore.java:4436)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:140)
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:99)
	at com.sun.proxy.$Proxy177.get_partitions_by_filter(Unknown Source)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.listPartitionsByFilter(HiveMetaStoreClient.java:1155)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156)
	at com.sun.proxy.$Proxy178.listPartitionsByFilter(Unknown Source)
	at org.apache.hadoop.hive.ql.metadata.Hive.getPartitionsByFilter(Hive.java:2345)
	... 63 more
[info] - 2.0: getPartitionsByFilter returns all partitions when hive.metastore.try.direct.sql=false (35 seconds, 880 milliseconds)
[info] - 2.0: getPartitionsByFilter: ds=20170101 (383 milliseconds)
[info] - 2.0: getPartitionsByFilter: ds=(20170101 + 1) and h=0 (183 milliseconds)
[info] - 2.0: getPartitionsByFilter: chunk='aa' (198 milliseconds)
[info] - 2.0: getPartitionsByFilter: 20170101=ds (136 milliseconds)
[info] - 2.0: getPartitionsByFilter: ds=20170101 and h=10 (189 milliseconds)
[info] - 2.0: getPartitionsByFilter: ds=20170101 or ds=20170102 (232 milliseconds)
[info] - 2.0: getPartitionsByFilter: ds in (20170102, 20170103) (using IN expression) (183 milliseconds)
[info] - 2.0: getPartitionsByFilter: ds in (20170102, 20170103) (using INSET expression) (73 milliseconds)
[info] - 2.0: getPartitionsByFilter: chunk in ('ab', 'ba') (using IN expression) (178 milliseconds)
[info] - 2.0: getPartitionsByFilter: chunk in ('ab', 'ba') (using INSET expression) (65 milliseconds)
[info] - 2.0: getPartitionsByFilter: (ds=20170101 and h>=8) or (ds=20170102 and h<8) (189 milliseconds)
[info] - 2.0: getPartitionsByFilter: (ds=20170101 and h>=8) or (ds=20170102 and h<(7+1)) (163 milliseconds)
[info] - 2.0: getPartitionsByFilter: chunk in ('ab', 'ba') and ((ds=20170101 and h>=8) or (ds=20170102 and h<8)) (227 milliseconds)
[info] HiveClientSuite(2.1):
20:25:07.279 WARN org.apache.hadoop.hive.metastore.ObjectStore: Version information not found in metastore. hive.metastore.schema.verification is not enabled so recording the schema version 2.1.0
20:25:07.279 WARN org.apache.hadoop.hive.metastore.ObjectStore: setMetaStoreSchemaVersion called but recording version is disabled: version = 2.1.0, comment = Set by MetaStore jenkins@192.168.10.25
20:25:07.294 WARN org.apache.hadoop.hive.metastore.ObjectStore: Failed to get database default, returning NoSuchObjectException
20:25:08.107 WARN org.apache.hadoop.hive.ql.session.SessionState: METASTORE_FILTER_HOOK will be ignored, since hive.security.authorization.manager is set to instance of HiveAuthorizerFactory.
20:25:52.393 WARN org.apache.hadoop.hive.metastore.ObjectStore: Version information not found in metastore. hive.metastore.schema.verification is not enabled so recording the schema version 2.1.0
20:25:52.393 WARN org.apache.hadoop.hive.metastore.ObjectStore: setMetaStoreSchemaVersion called but recording version is disabled: version = 2.1.0, comment = Set by MetaStore jenkins@192.168.10.25
20:25:53.565 WARN org.apache.hadoop.hive.ql.session.SessionState: METASTORE_FILTER_HOOK will be ignored, since hive.security.authorization.manager is set to instance of HiveAuthorizerFactory.
20:26:32.372 WARN org.apache.spark.sql.hive.client.Shim_v2_1: Caught Hive MetaException attempting to get partition metadata by filter from Hive. Falling back to fetching all partition metadata, which will degrade performance. Modifying your Hive metastore configuration to set hive.metastore.try.direct.sql to true may resolve this problem.
java.lang.reflect.InvocationTargetException
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.sql.hive.client.Shim_v0_13.getPartitionsByFilter(HiveShim.scala:721)
	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getPartitionsByFilter$1.apply(HiveClientImpl.scala:650)
	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$getPartitionsByFilter$1.apply(HiveClientImpl.scala:648)
	at org.apache.spark.sql.hive.client.HiveClientImpl$$anonfun$withHiveState$1.apply(HiveClientImpl.scala:273)
	at org.apache.spark.sql.hive.client.HiveClientImpl.liftedTree1$1(HiveClientImpl.scala:211)
	at org.apache.spark.sql.hive.client.HiveClientImpl.retryLocked(HiveClientImpl.scala:210)
	at org.apache.spark.sql.hive.client.HiveClientImpl.withHiveState(HiveClientImpl.scala:256)
	at org.apache.spark.sql.hive.client.HiveClientImpl.getPartitionsByFilter(HiveClientImpl.scala:648)
	at org.apache.spark.sql.hive.client.HiveClientSuite$$anonfun$15.apply(HiveClientSuite.scala:75)
	at org.apache.spark.sql.hive.client.HiveClientSuite$$anonfun$15.apply(HiveClientSuite.scala:73)
	at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:68)
	at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:289)
	at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
	at org.scalatest.FunSuite.runTest(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:396)
	at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:384)
	at scala.collection.immutable.List.foreach(List.scala:381)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:384)
	at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:379)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:461)
	at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
	at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
	at org.scalatest.Suite$class.run(Suite.scala:1147)
	at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:521)
	at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:31)
	at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:31)
	at org.scalatest.Suite$class.callExecuteOnSuite$1(Suite.scala:1210)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1257)
	at org.scalatest.Suite$$anonfun$runNestedSuites$1.apply(Suite.scala:1255)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at org.scalatest.Suite$class.runNestedSuites(Suite.scala:1255)
	at org.apache.spark.sql.hive.client.HiveClientSuites.runNestedSuites(HiveClientSuites.scala:24)
	at org.scalatest.Suite$class.run(Suite.scala:1144)
	at org.apache.spark.sql.hive.client.HiveClientSuites.run(HiveClientSuites.scala:24)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:480)
	at sbt.ForkMain$Run$2.call(ForkMain.java:296)
	at sbt.ForkMain$Run$2.call(ForkMain.java:286)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: MetaException(message:Filtering is supported only on partition keys of type string)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree$FilterBuilder.setError(ExpressionTree.java:184)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree$LeafNode.getJdoFilterPushdownParam(ExpressionTree.java:439)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree$LeafNode.generateJDOFilterOverPartitions(ExpressionTree.java:356)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree$LeafNode.generateJDOFilter(ExpressionTree.java:278)
	at org.apache.hadoop.hive.metastore.parser.ExpressionTree.generateJDOFilterFragment(ExpressionTree.java:583)
	at org.apache.hadoop.hive.metastore.ObjectStore.makeQueryFilterString(ObjectStore.java:3029)
	at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsViaOrmFilter(ObjectStore.java:2581)
	at org.apache.hadoop.hive.metastore.ObjectStore.access$500(ObjectStore.java:176)
	at org.apache.hadoop.hive.metastore.ObjectStore$6.getJdoResult(ObjectStore.java:2962)
	at org.apache.hadoop.hive.metastore.ObjectStore$6.getJdoResult(ObjectStore.java:2946)
	at org.apache.hadoop.hive.metastore.ObjectStore$GetHelper.run(ObjectStore.java:2771)
	at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByFilterInternal(ObjectStore.java:2964)
	at org.apache.hadoop.hive.metastore.ObjectStore.getPartitionsByFilter(ObjectStore.java:2703)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:101)
	at com.sun.proxy.$Proxy185.getPartitionsByFilter(Unknown Source)
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.get_partitions_by_filter(HiveMetaStore.java:4752)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:140)
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:99)
	at com.sun.proxy.$Proxy187.get_partitions_by_filter(Unknown Source)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.listPartitionsByFilter(HiveMetaStoreClient.java:1222)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:154)
	at com.sun.proxy.$Proxy188.listPartitionsByFilter(Unknown Source)
	at org.apache.hadoop.hive.ql.metadata.Hive.getPartitionsByFilter(Hive.java:2457)
	... 63 more
[info] - 2.1: getPartitionsByFilter returns all partitions when hive.metastore.try.direct.sql=false (47 seconds, 198 milliseconds)
[info] - 2.1: getPartitionsByFilter: ds=20170101 (357 milliseconds)
[info] - 2.1: getPartitionsByFilter: ds=(20170101 + 1) and h=0 (162 milliseconds)
[info] - 2.1: getPartitionsByFilter: chunk='aa' (170 milliseconds)
[info] - 2.1: getPartitionsByFilter: 20170101=ds (133 milliseconds)
[info] - 2.1: getPartitionsByFilter: ds=20170101 and h=10 (176 milliseconds)
[info] - 2.1: getPartitionsByFilter: ds=20170101 or ds=20170102 (234 milliseconds)
[info] - 2.1: getPartitionsByFilter: ds in (20170102, 20170103) (using IN expression) (191 milliseconds)
[info] - 2.1: getPartitionsByFilter: ds in (20170102, 20170103) (using INSET expression) (67 milliseconds)
[info] - 2.1: getPartitionsByFilter: chunk in ('ab', 'ba') (using IN expression) (156 milliseconds)
[info] - 2.1: getPartitionsByFilter: chunk in ('ab', 'ba') (using INSET expression) (55 milliseconds)
[info] - 2.1: getPartitionsByFilter: (ds=20170101 and h>=8) or (ds=20170102 and h<8) (153 milliseconds)
[info] - 2.1: getPartitionsByFilter: (ds=20170101 and h>=8) or (ds=20170102 and h<(7+1)) (141 milliseconds)
[info] - 2.1: getPartitionsByFilter: chunk in ('ab', 'ba') and ((ds=20170101 and h>=8) or (ds=20170102 and h<8)) (191 milliseconds)
[info] HiveUDAFSuite:
[info] - built-in Hive UDAF (410 milliseconds)
[info] - customized Hive UDAF (332 milliseconds)
[info] - call JAVA UDAF (653 milliseconds)
[info] - non-deterministic children expressions of UDAF (45 milliseconds)
[info] Test run started
[info] Test org.apache.spark.sql.hive.JavaDataFrameSuite.testUDAF started
[info] Test org.apache.spark.sql.hive.JavaDataFrameSuite.saveTableAndQueryIt started
[info] Test run finished: 0 failed, 0 ignored, 2 total, 1.503s
[info] Test run started
[info] Test org.apache.spark.sql.hive.JavaMetastoreDataSourcesSuite.saveExternalTableAndQueryIt started
20:26:39.699 WARN org.apache.spark.sql.hive.test.TestHiveExternalCatalog: Couldn't find corresponding Hive SerDe for data source provider org.apache.spark.sql.json. Persisting data source table `default`.`javasavedtable` into Hive metastore in Spark SQL specific format, which is NOT compatible with Hive.
20:26:40.135 WARN org.apache.spark.sql.hive.test.TestHiveExternalCatalog: Couldn't find corresponding Hive SerDe for data source provider org.apache.spark.sql.json. Persisting data source table `default`.`externaltable` into Hive metastore in Spark SQL specific format, which is NOT compatible with Hive.
[info] Test org.apache.spark.sql.hive.JavaMetastoreDataSourcesSuite.saveTableAndQueryIt started
20:26:40.827 WARN org.apache.spark.sql.hive.test.TestHiveExternalCatalog: Couldn't find corresponding Hive SerDe for data source provider org.apache.spark.sql.json. Persisting data source table `default`.`javasavedtable` into Hive metastore in Spark SQL specific format, which is NOT compatible with Hive.
[info] Test org.apache.spark.sql.hive.JavaMetastoreDataSourcesSuite.saveExternalTableWithSchemaAndQueryIt started
20:26:41.232 WARN org.apache.spark.sql.hive.test.TestHiveExternalCatalog: Couldn't find corresponding Hive SerDe for data source provider org.apache.spark.sql.json. Persisting data source table `default`.`javasavedtable` into Hive metastore in Spark SQL specific format, which is NOT compatible with Hive.
20:26:41.424 WARN org.apache.spark.sql.hive.test.TestHiveExternalCatalog: Couldn't find corresponding Hive SerDe for data source provider org.apache.spark.sql.json. Persisting data source table `default`.`externaltable` into Hive metastore in Spark SQL specific format, which is NOT compatible with Hive.
[info] Test run finished: 0 failed, 0 ignored, 3 total, 2.4s
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 31 seconds.
[info] Total number of tests run: 29
[info] Suites: completed 3, aborted 0
[info] Tests: succeeded 29, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
[info] Passed: Total 29, Failed 0, Errors 0, Passed 29
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 27 seconds.
[info] Total number of tests run: 0
[info] Suites: completed 0, aborted 0
[info] Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
[info] No tests were executed.
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 17 seconds.
[info] Total number of tests run: 0
[info] Suites: completed 0, aborted 0
[info] Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
[info] No tests were executed.
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 30 seconds.
[info] Total number of tests run: 0
[info] Suites: completed 0, aborted 0
[info] Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
[info] No tests were executed.
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 31 seconds.
[info] Total number of tests run: 5
[info] Suites: completed 1, aborted 0
[info] Tests: succeeded 5, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
[info] Passed: Total 5, Failed 0, Errors 0, Passed 5
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 27 seconds.
[info] Total number of tests run: 0
[info] Suites: completed 0, aborted 0
[info] Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
[info] No tests were executed.
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 27 seconds.
[info] Total number of tests run: 0
[info] Suites: completed 0, aborted 0
[info] Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
[info] No tests were executed.
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 27 seconds.
[info] Total number of tests run: 0
[info] Suites: completed 0, aborted 0
[info] Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
[info] No tests were executed.
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 16 seconds.
[info] Total number of tests run: 0
[info] Suites: completed 0, aborted 0
[info] Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
[info] No tests were executed.
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 31 seconds.
[info] Total number of tests run: 19
[info] Suites: completed 1, aborted 0
[info] Tests: succeeded 19, failed 0, canceled 0, ignored 1, pending 0
[info] All tests passed.
[info] Passed: Total 72, Failed 0, Errors 0, Passed 72, Ignored 1
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 31 seconds.
[info] Total number of tests run: 0
[info] Suites: completed 0, aborted 0
[info] Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
[info] No tests were executed.
[info] Passed: Total 101, Failed 0, Errors 0, Passed 100, Skipped 1
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 31 seconds.
[info] Total number of tests run: 0
[info] Suites: completed 0, aborted 0
[info] Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
[info] No tests were executed.
[info] Passed: Total 37, Failed 0, Errors 0, Passed 37
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 30 seconds.
[info] Total number of tests run: 0
[info] Suites: completed 0, aborted 0
[info] Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
[info] No tests were executed.
[info] Passed: Total 41, Failed 0, Errors 0, Passed 41
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 25 seconds.
[info] Total number of tests run: 4
[info] Suites: completed 2, aborted 0
[info] Tests: succeeded 4, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
[info] Passed: Total 6, Failed 0, Errors 0, Passed 6
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 25 seconds.
[info] Total number of tests run: 2025
[info] Suites: completed 208, aborted 0
[info] Tests: succeeded 2025, failed 0, canceled 0, ignored 8, pending 0
[info] All tests passed.
[info] Passed: Total 2262, Failed 0, Errors 0, Passed 2262, Ignored 8
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 25 seconds.
[info] Total number of tests run: 14
[info] Suites: completed 2, aborted 0
[info] Tests: succeeded 14, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
[info] Passed: Total 18, Failed 0, Errors 0, Passed 18
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 25 seconds.
[info] Total number of tests run: 107
[info] Suites: completed 19, aborted 0
[info] Tests: succeeded 107, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
[info] Passed: Total 107, Failed 0, Errors 0, Passed 107
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 25 seconds.
[info] Total number of tests run: 78
[info] Suites: completed 9, aborted 0
[info] Tests: succeeded 78, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
[info] Passed: Total 78, Failed 0, Errors 0, Passed 78
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 25 seconds.
[info] Total number of tests run: 74
[info] Suites: completed 14, aborted 0
[info] Tests: succeeded 74, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
[info] Passed: Total 74, Failed 0, Errors 0, Passed 74
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 25 seconds.
[info] Total number of tests run: 331
[info] Suites: completed 40, aborted 0
[info] Tests: succeeded 331, failed 0, canceled 0, ignored 1, pending 0
[info] All tests passed.
[info] Passed: Total 431, Failed 0, Errors 0, Passed 431, Ignored 1
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 25 seconds.
[info] Total number of tests run: 51
[info] Suites: completed 7, aborted 1
[info] Tests: succeeded 47, failed 4, canceled 0, ignored 0, pending 0
[info] *** 1 SUITE ABORTED ***
[info] *** 4 TESTS FAILED ***
[error] Error: Total 58, Failed 4, Errors 1, Passed 53
[error] Failed tests:
[error] 	org.apache.spark.streaming.kinesis.WithAggregationKinesisStreamSuite
[error] Error during tests:
[error] 	org.apache.spark.streaming.kinesis.WithAggregationKinesisBackedBlockRDDSuite
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 31 seconds.
[info] Total number of tests run: 84
[info] Suites: completed 8, aborted 0
[info] Tests: succeeded 84, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
[info] Passed: Total 84, Failed 0, Errors 0, Passed 84
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 18 seconds.
[info] Total number of tests run: 32
[info] Suites: completed 3, aborted 0
[info] Tests: succeeded 32, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
[info] Passed: Total 32, Failed 0, Errors 0, Passed 32
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 18 seconds.
[info] Total number of tests run: 2478
[info] Suites: completed 155, aborted 0
[info] Tests: succeeded 2477, failed 1, canceled 0, ignored 2, pending 0
[info] *** 1 TEST FAILED ***
[error] Failed: Total 2502, Failed 1, Errors 0, Passed 2501, Ignored 2
[error] Failed tests:
[error] 	org.apache.spark.sql.catalyst.util.DateTimeUtilsSuite
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 16 seconds.
[info] Total number of tests run: 1242
[info] Suites: completed 179, aborted 0
[info] Tests: succeeded 1242, failed 0, canceled 0, ignored 7, pending 0
[info] All tests passed.
[info] Passed: Total 1360, Failed 0, Errors 0, Passed 1360, Ignored 7
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 13 seconds.
[info] Total number of tests run: 3710
[info] Suites: completed 208, aborted 0
[info] Tests: succeeded 3710, failed 0, canceled 0, ignored 55, pending 0
[info] All tests passed.
[info] Passed: Total 3793, Failed 0, Errors 0, Passed 3793, Ignored 55
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 13 seconds.
[info] Total number of tests run: 39
[info] Suites: completed 9, aborted 0
[info] Tests: succeeded 39, failed 0, canceled 0, ignored 2, pending 0
[info] All tests passed.
[info] Passed: Total 39, Failed 0, Errors 0, Passed 39, Ignored 2
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 14 seconds.
[info] Total number of tests run: 66
[info] Suites: completed 9, aborted 0
[info] Tests: succeeded 66, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
[info] Passed: Total 66, Failed 0, Errors 0, Passed 66
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 12 seconds.
[info] Total number of tests run: 2821
[info] Suites: completed 88, aborted 1
[info] Tests: succeeded 2821, failed 0, canceled 0, ignored 596, pending 0
[info] *** 1 SUITE ABORTED ***
[error] Error: Total 2827, Failed 0, Errors 1, Passed 2826, Ignored 596
[error] Error during tests:
[error] 	org.apache.spark.sql.hive.HiveExternalCatalogVersionsSuite
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 32 seconds.
[info] Total number of tests run: 0
[info] Suites: completed 0, aborted 0
[info] Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
[info] No tests were executed.
[info] Passed: Total 73, Failed 0, Errors 0, Passed 73
[info] ScalaTest
[info] Run completed in 2 hours, 47 minutes, 27 seconds.
[info] Total number of tests run: 17
[info] Suites: completed 5, aborted 0
[info] Tests: succeeded 17, failed 0, canceled 0, ignored 0, pending 0
[info] All tests passed.
[info] Passed: Total 20, Failed 0, Errors 0, Passed 20
[error] (catalyst/test:test) sbt.TestsFailedException: Tests unsuccessful
[error] (streaming-kinesis-asl/test:test) sbt.TestsFailedException: Tests unsuccessful
[error] (hive/test:test) sbt.TestsFailedException: Tests unsuccessful
[error] Total time: 10059 s, completed Oct 21, 2019 8:26:50 PM
[error] running /home/jenkins/workspace/SparkPullRequestBuilder@2/build/sbt -Phadoop-2.6 -Pflume -Phive-thriftserver -Pyarn -Pkafka-0-8 -Phive -Pkinesis-asl -Pmesos test ; received return code 1
Attempting to post to Github...
 > Post successful.
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
Test FAILed.
Refer to this link for build results (access rights to CI server needed): 
https://amplab.cs.berkeley.edu/jenkins//job/SparkPullRequestBuilder/112418/
Test FAILed.
Finished: FAILURE