Regression
org.apache.spark.sql.kafka010.KafkaMicroBatchV2SourceSuite.subscribing topic by pattern with topic deletions
Failing for the past 1 build (Since #5035)

Error Message
org.scalatest.exceptions.TestFailedException: Timed out waiting for stream: The code passed to eventually never returned normally. Attempted 300 times over 1.0010006424 minutes. Last failure message: KafkaTestUtils.this.zkClient.isTopicMarkedForDeletion(topic) was true topic is still marked for deletion.
  org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:432)
  org.scalatest.concurrent.Eventually.eventually(Eventually.scala:439)
  org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:391)
  org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
  org.scalatest.concurrent.Eventually.eventually(Eventually.scala:308)
  org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:307)
  org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
  org.apache.spark.sql.kafka010.KafkaTestUtils.verifyTopicDeletionWithRetries(KafkaTestUtils.scala:636)
  org.apache.spark.sql.kafka010.KafkaTestUtils.deleteTopic(KafkaTestUtils.scala:428)
  org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase.$anonfun$new$20(KafkaMicroBatchSourceSuite.scala:379)
Caused by: KafkaTestUtils.this.zkClient.isTopicMarkedForDeletion(topic) was true topic is still marked for deletion
  org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:530)
  org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:529)
  org.scalatest.Assertions$.newAssertionFailedException(Assertions.scala:1389)
  org.scalatest.Assertions$AssertionsHelper.macroAssert(Assertions.scala:503)
  org.apache.spark.sql.kafka010.KafkaTestUtils.verifyTopicDeletion(KafkaTestUtils.scala:608)
  org.apache.spark.sql.kafka010.KafkaTestUtils.$anonfun$verifyTopicDeletionWithRetries$1(KafkaTestUtils.scala:638)
  scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
  org.scalatest.concurrent.Eventually.makeAValiantAttempt$1(Eventually.scala:395)
  org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:409)
  org.scalatest.concurrent.Eventually.eventually(Eventually.scala:439)

== Progress ==
   AssertOnQuery(<condition>, )
   AddKafkaData(topics = Set(topic-31-seems), data = WrappedArray(1, 2, 3), message = )
   CheckAnswer: [2],[3],[4]
=> Assert(<condition>, )
   AddKafkaData(topics = Set(topic-31-bad), data = WrappedArray(4, 5, 6), message = )
   CheckAnswer: [2],[3],[4],[5],[6],[7]

== Stream ==
Output Mode: Append
Stream state: {KafkaV2[SubscribePattern[topic-31-.*]]: {}}
Thread state: alive
Thread stack trace:
  java.lang.Thread.sleep(Native Method)
  org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:241)
  org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$5283/1670594217.apply$mcZ$sp(Unknown Source)
  org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:57)
  org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:185)
  org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:333)
  org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:244)

== Sink ==
0:
1: [2]
2: [3]
3: [4]
4:

== Plan ==
== Parsed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@59017f31
+- SerializeFromObject [input[0, int, false] AS value#25036]
   +- MapElements org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$Lambda$5499/1396712557@52da9978, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#25035: int
      +- DeserializeToObject newInstance(class scala.Tuple2), obj#25034: scala.Tuple2
         +- Project [cast(key#25010 as string) AS key#25024, cast(value#25011 as string) AS value#25025]
            +- StreamingDataSourceV2Relation [key#25010, value#25011, topic#25012, partition#25013, offset#25014L, timestamp#25015, timestampType#25016], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@bf1ff14, KafkaV2[SubscribePattern[topic-31-.*]], {"topic-31-seems":{"2":1,"4":0,"1":0,"3":2,"0":1}}, {}

== Analyzed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@59017f31
+- SerializeFromObject [input[0, int, false] AS value#25036]
   +- MapElements org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$Lambda$5499/1396712557@52da9978, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#25035: int
      +- DeserializeToObject newInstance(class scala.Tuple2), obj#25034: scala.Tuple2
         +- Project [cast(key#25010 as string) AS key#25024, cast(value#25011 as string) AS value#25025]
            +- StreamingDataSourceV2Relation [key#25010, value#25011, topic#25012, partition#25013, offset#25014L, timestamp#25015, timestampType#25016], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@bf1ff14, KafkaV2[SubscribePattern[topic-31-.*]], {"topic-31-seems":{"2":1,"4":0,"1":0,"3":2,"0":1}}, {}

== Optimized Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@59017f31
+- SerializeFromObject [input[0, int, false] AS value#25036]
   +- MapElements org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$Lambda$5499/1396712557@52da9978, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#25035: int
      +- DeserializeToObject newInstance(class scala.Tuple2), obj#25034: scala.Tuple2
         +- Project [cast(key#25010 as string) AS key#25024, cast(value#25011 as string) AS value#25025]
            +- StreamingDataSourceV2Relation [key#25010, value#25011, topic#25012, partition#25013, offset#25014L, timestamp#25015, timestampType#25016], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@bf1ff14, KafkaV2[SubscribePattern[topic-31-.*]], {"topic-31-seems":{"2":1,"4":0,"1":0,"3":2,"0":1}}, {}

== Physical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@59017f31
+- *(1) SerializeFromObject [input[0, int, false] AS value#25036]
   +- *(1) MapElements org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$Lambda$5499/1396712557@52da9978, obj#25035: int
      +- *(1) DeserializeToObject newInstance(class scala.Tuple2), obj#25034: scala.Tuple2
         +- *(1) Project [cast(key#25010 as string) AS key#25024, cast(value#25011 as string) AS value#25025]
            +- *(1) Project [key#25010, value#25011, topic#25012, partition#25013, offset#25014L, timestamp#25015, timestampType#25016]
               +- MicroBatchScan[key#25010, value#25011, topic#25012, partition#25013, offset#25014L, timestamp#25015, timestampType#25016] class org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan
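The failing check is the eventually-based poll inside KafkaTestUtils: deleteTopic issues the deletion, then verifyTopicDeletionWithRetries retries an assertion until ZooKeeper stops reporting the topic as pending deletion. A minimal Scala sketch of that pattern follows; the method name and patience values are illustrative rather than Spark's exact code, though zkClient.isTopicMarkedForDeletion and topicExists are real kafka.zk.KafkaZkClient calls, and 60 s at one attempt per 200 ms matches the ~300 attempts over ~1 minute reported above.

    import org.scalatest.concurrent.Eventually._
    import org.scalatest.time.{Millis, Seconds, Span}
    import kafka.zk.KafkaZkClient

    // Sketch of the retry loop behind KafkaTestUtils.verifyTopicDeletionWithRetries:
    // keep re-running the assertions until they pass or the one-minute patience expires.
    def verifyTopicDeleted(zkClient: KafkaZkClient, topic: String): Unit = {
      eventually(timeout(Span(60, Seconds)), interval(Span(200, Millis))) {
        // This is the assertion that never passed in this build: the broker
        // never cleared the topic's deletion marker in ZooKeeper.
        assert(!zkClient.isTopicMarkedForDeletion(topic), "topic is still marked for deletion")
        assert(!zkClient.topicExists(topic), "topic still exists")
      }
    }

Because eventually rethrows only the last failure, the "was true" assertion message above means every one of the 300 attempts found the deletion marker still present, i.e. the broker stalled on the delete rather than the test racing a slow but eventually successful deletion.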
Stacktrace
sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException: Timed out waiting for stream: The code passed to eventually never returned normally. Attempted 300 times over 1.0010006424 minutes. Last failure message: KafkaTestUtils.this.zkClient.isTopicMarkedForDeletion(topic) was true topic is still marked for deletion.
  org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:432)
  org.scalatest.concurrent.Eventually.eventually(Eventually.scala:439)
  org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:391)
  org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
  org.scalatest.concurrent.Eventually.eventually(Eventually.scala:308)
  org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:307)
  org.scalatest.concurrent.Eventually$.eventually(Eventually.scala:479)
  org.apache.spark.sql.kafka010.KafkaTestUtils.verifyTopicDeletionWithRetries(KafkaTestUtils.scala:636)
  org.apache.spark.sql.kafka010.KafkaTestUtils.deleteTopic(KafkaTestUtils.scala:428)
  org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase.$anonfun$new$20(KafkaMicroBatchSourceSuite.scala:379)
Caused by: KafkaTestUtils.this.zkClient.isTopicMarkedForDeletion(topic) was true topic is still marked for deletion
  org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:530)
  org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:529)
  org.scalatest.Assertions$.newAssertionFailedException(Assertions.scala:1389)
  org.scalatest.Assertions$AssertionsHelper.macroAssert(Assertions.scala:503)
  org.apache.spark.sql.kafka010.KafkaTestUtils.verifyTopicDeletion(KafkaTestUtils.scala:608)
  org.apache.spark.sql.kafka010.KafkaTestUtils.$anonfun$verifyTopicDeletionWithRetries$1(KafkaTestUtils.scala:638)
  scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
  org.scalatest.concurrent.Eventually.makeAValiantAttempt$1(Eventually.scala:395)
  org.scalatest.concurrent.Eventually.tryTryAgain$1(Eventually.scala:409)
  org.scalatest.concurrent.Eventually.eventually(Eventually.scala:439)

== Progress ==
   AssertOnQuery(<condition>, )
   AddKafkaData(topics = Set(topic-31-seems), data = WrappedArray(1, 2, 3), message = )
   CheckAnswer: [2],[3],[4]
=> Assert(<condition>, )
   AddKafkaData(topics = Set(topic-31-bad), data = WrappedArray(4, 5, 6), message = )
   CheckAnswer: [2],[3],[4],[5],[6],[7]

== Stream ==
Output Mode: Append
Stream state: {KafkaV2[SubscribePattern[topic-31-.*]]: {}}
Thread state: alive
Thread stack trace:
  java.lang.Thread.sleep(Native Method)
  org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:241)
  org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$5283/1670594217.apply$mcZ$sp(Unknown Source)
  org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:57)
  org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:185)
  org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:333)
  org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:244)

== Sink ==
0:
1: [2]
2: [3]
3: [4]
4:

== Plan ==
== Parsed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@59017f31
+- SerializeFromObject [input[0, int, false] AS value#25036]
   +- MapElements org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$Lambda$5499/1396712557@52da9978, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#25035: int
      +- DeserializeToObject newInstance(class scala.Tuple2), obj#25034: scala.Tuple2
         +- Project [cast(key#25010 as string) AS key#25024, cast(value#25011 as string) AS value#25025]
            +- StreamingDataSourceV2Relation [key#25010, value#25011, topic#25012, partition#25013, offset#25014L, timestamp#25015, timestampType#25016], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@bf1ff14, KafkaV2[SubscribePattern[topic-31-.*]], {"topic-31-seems":{"2":1,"4":0,"1":0,"3":2,"0":1}}, {}

== Analyzed Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@59017f31
+- SerializeFromObject [input[0, int, false] AS value#25036]
   +- MapElements org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$Lambda$5499/1396712557@52da9978, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#25035: int
      +- DeserializeToObject newInstance(class scala.Tuple2), obj#25034: scala.Tuple2
         +- Project [cast(key#25010 as string) AS key#25024, cast(value#25011 as string) AS value#25025]
            +- StreamingDataSourceV2Relation [key#25010, value#25011, topic#25012, partition#25013, offset#25014L, timestamp#25015, timestampType#25016], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@bf1ff14, KafkaV2[SubscribePattern[topic-31-.*]], {"topic-31-seems":{"2":1,"4":0,"1":0,"3":2,"0":1}}, {}

== Optimized Logical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@59017f31
+- SerializeFromObject [input[0, int, false] AS value#25036]
   +- MapElements org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$Lambda$5499/1396712557@52da9978, class scala.Tuple2, [StructField(_1,StringType,true), StructField(_2,StringType,true)], obj#25035: int
      +- DeserializeToObject newInstance(class scala.Tuple2), obj#25034: scala.Tuple2
         +- Project [cast(key#25010 as string) AS key#25024, cast(value#25011 as string) AS value#25025]
            +- StreamingDataSourceV2Relation [key#25010, value#25011, topic#25012, partition#25013, offset#25014L, timestamp#25015, timestampType#25016], org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan@bf1ff14, KafkaV2[SubscribePattern[topic-31-.*]], {"topic-31-seems":{"2":1,"4":0,"1":0,"3":2,"0":1}}, {}

== Physical Plan ==
WriteToDataSourceV2 org.apache.spark.sql.execution.streaming.sources.MicroBatchWrite@59017f31
+- *(1) SerializeFromObject [input[0, int, false] AS value#25036]
   +- *(1) MapElements org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase$$Lambda$5499/1396712557@52da9978, obj#25035: int
      +- *(1) DeserializeToObject newInstance(class scala.Tuple2), obj#25034: scala.Tuple2
         +- *(1) Project [cast(key#25010 as string) AS key#25024, cast(value#25011 as string) AS value#25025]
            +- *(1) Project [key#25010, value#25011, topic#25012, partition#25013, offset#25014L, timestamp#25015, timestampType#25016]
               +- MicroBatchScan[key#25010, value#25011, topic#25012, partition#25013, offset#25014L, timestamp#25015, timestampType#25016] class org.apache.spark.sql.kafka010.KafkaSourceProvider$KafkaScan

  at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:530)
  at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:529)
  at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1560)
  at org.scalatest.Assertions.fail(Assertions.scala:1091)
  at org.scalatest.Assertions.fail$(Assertions.scala:1087)
  at org.scalatest.FunSuite.fail(FunSuite.scala:1560)
  at org.apache.spark.sql.streaming.StreamTest.failTest$1(StreamTest.scala:452)
  at org.apache.spark.sql.streaming.StreamTest.liftedTree1$1(StreamTest.scala:788)
  at org.apache.spark.sql.streaming.StreamTest.testStream(StreamTest.scala:764)
  at org.apache.spark.sql.streaming.StreamTest.testStream$(StreamTest.scala:334)
  at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaMicroBatchSourceSuite.scala:53)
  at org.apache.spark.sql.kafka010.KafkaMicroBatchSourceSuiteBase.$anonfun$new$18(KafkaMicroBatchSourceSuite.scala:384)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
  at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
  at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
  at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
  at org.scalatest.Transformer.apply(Transformer.scala:22)
  at org.scalatest.Transformer.apply(Transformer.scala:20)
  at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
  at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:151)
  at org.scalatest.FunSuiteLike.invokeWithFixture$1(FunSuiteLike.scala:184)
  at org.scalatest.FunSuiteLike.$anonfun$runTest$1(FunSuiteLike.scala:196)
  at org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
  at org.scalatest.FunSuiteLike.runTest(FunSuiteLike.scala:196)
  at org.scalatest.FunSuiteLike.runTest$(FunSuiteLike.scala:178)
  at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:58)
  at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:221)
  at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:214)
  at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:58)
  at org.scalatest.FunSuiteLike.$anonfun$runTests$1(FunSuiteLike.scala:229)
  at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:393)
  at scala.collection.immutable.List.foreach(List.scala:392)
  at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:381)
  at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:376)
  at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:458)
  at org.scalatest.FunSuiteLike.runTests(FunSuiteLike.scala:229)
  at org.scalatest.FunSuiteLike.runTests$(FunSuiteLike.scala:228)
  at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
  at org.scalatest.Suite.run(Suite.scala:1124)
  at org.scalatest.Suite.run$(Suite.scala:1106)
  at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
  at org.scalatest.FunSuiteLike.$anonfun$run$1(FunSuiteLike.scala:233)
  at org.scalatest.SuperEngine.runImpl(Engine.scala:518)
  at org.scalatest.FunSuiteLike.run(FunSuiteLike.scala:233)
  at org.scalatest.FunSuiteLike.run$(FunSuiteLike.scala:232)
  at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:58)
  at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
  at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
  at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
  at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:58)
  at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
  at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
  at sbt.ForkMain$Run$2.call(ForkMain.java:296)
  at sbt.ForkMain$Run$2.call(ForkMain.java:286)
  at java.util.concurrent.FutureTask.run(FutureTask.java:266)
  at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
  at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
  at java.lang.Thread.run(Thread.java:748)
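The == Progress == dump maps one-to-one onto the suite's testStream action sequence, with "=>" marking the step that was executing when the timeout fired. A rough Scala reconstruction follows, using Spark's StreamTest DSL; the body of the Assert step is inferred from the KafkaTestUtils.deleteTopic frame in the stack trace and is an assumption, not the verbatim suite code.

    // Reconstructed from the == Progress == dump; Assert body is assumed, not verbatim.
    testStream(mapped)(
      makeSureGetOffsetCalled,                        // AssertOnQuery(<condition>, )
      AddKafkaData(Set("topic-31-seems"), 1, 2, 3),   // first topic matching topic-31-.*
      CheckAnswer(2, 3, 4),
      Assert {                                        // => the step that timed out
        testUtils.deleteTopic("topic-31-seems")       // hung in verifyTopicDeletionWithRetries
        testUtils.createTopic("topic-31-bad", partitions = 5)
        true
      },
      AddKafkaData(Set("topic-31-bad"), 4, 5, 6),     // never reached in this run
      CheckAnswer(2, 3, 4, 5, 6, 7)
    )

Read this way, the streaming query itself was healthy when the test died: the sink already held [2], [3], [4] and the stream thread was idly sleeping between trigger executions. The failure is entirely in the test fixture, where the embedded broker never finished deleting topic-31-seems within eventually's one-minute patience, so the Assert step failed before the second AddKafkaData ever ran.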