Regression

org.apache.spark.sql.streaming.StreamingOuterJoinSuite.SPARK-26187 self right outer join should not return outer nulls for already matched rows

Failing for the past 1 build (Since Failed #5037)
Took 1.5 sec.

Error Message

org.apache.spark.sql.streaming.StreamingQueryException: Query [id = 627246a8-df99-4602-b20f-7bd24d2eb420, runId = af7920ef-b653-4ac2-8587-322664f44b81] terminated with exception: Cannot call methods on a stopped SparkContext.
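
The root error comes from Spark's guard against using a stopped context: SparkContext.assertNotStopped throws an IllegalStateException the moment any context-backed method (here, broadcast) is called after stop(). A minimal sketch of that behavior in a plain local-mode context (illustrative only, not the suite's code):

    import org.apache.spark.{SparkConf, SparkContext}

    // Stop a context, then call a method on it: assertNotStopped() fires first,
    // producing the "Cannot call methods on a stopped SparkContext" message above.
    val sc = new SparkContext(
      new SparkConf().setMaster("local[2]").setAppName("stopped-context-repro"))
    sc.stop()
    sc.broadcast(Seq(1, 2, 3)) // throws java.lang.IllegalStateException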

Stacktrace

sbt.ForkMain$ForkError: org.apache.spark.sql.streaming.StreamingQueryException: Query [id = 627246a8-df99-4602-b20f-7bd24d2eb420, runId = af7920ef-b653-4ac2-8587-322664f44b81] terminated with exception: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.streaming.StreamingOuterJoinSuite.beforeAll(StreamingJoinSuite.scala:484)
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
org.apache.spark.sql.streaming.StreamingOuterJoinSuite.org$scalatest$BeforeAndAfter$$super$run(StreamingJoinSuite.scala:484)
org.scalatest.BeforeAndAfter.run(BeforeAndAfter.scala:258)
org.scalatest.BeforeAndAfter.run$(BeforeAndAfter.scala:256)
org.apache.spark.sql.streaming.StreamingOuterJoinSuite.run(StreamingJoinSuite.scala:484)
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
sbt.ForkMain$Run$2.call(ForkMain.java:296)
sbt.ForkMain$Run$2.call(ForkMain.java:286)
java.util.concurrent.FutureTask.run(FutureTask.java:266)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
java.lang.Thread.run(Thread.java:748)

The currently active SparkContext was created at:

(No active SparkContext.)
	at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:354)
	at org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:244)
Caused by: sbt.ForkMain$ForkError: java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.streaming.StreamingOuterJoinSuite.beforeAll(StreamingJoinSuite.scala:484)
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
org.apache.spark.sql.streaming.StreamingOuterJoinSuite.org$scalatest$BeforeAndAfter$$super$run(StreamingJoinSuite.scala:484)
org.scalatest.BeforeAndAfter.run(BeforeAndAfter.scala:258)
org.scalatest.BeforeAndAfter.run$(BeforeAndAfter.scala:256)
org.apache.spark.sql.streaming.StreamingOuterJoinSuite.run(StreamingJoinSuite.scala:484)
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:317)
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:510)
sbt.ForkMain$Run$2.call(ForkMain.java:296)
sbt.ForkMain$Run$2.call(ForkMain.java:286)
java.util.concurrent.FutureTask.run(FutureTask.java:266)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
java.lang.Thread.run(Thread.java:748)

The currently active SparkContext was created at:

(No active SparkContext.)
	at org.apache.spark.SparkContext.assertNotStopped(SparkContext.scala:111)
	at org.apache.spark.SparkContext.broadcast(SparkContext.scala:1471)
	at org.apache.spark.sql.execution.streaming.StreamingSymmetricHashJoinExec.<init>(StreamingSymmetricHashJoinExec.scala:171)
	at org.apache.spark.sql.execution.streaming.StreamingSymmetricHashJoinExec.<init>(StreamingSymmetricHashJoinExec.scala:151)
	at org.apache.spark.sql.execution.SparkStrategies$StreamingJoinStrategy$.apply(SparkStrategies.scala:495)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$1(QueryPlanner.scala:63)
	at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:484)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:490)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:489)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93)
	at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:68)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$3(QueryPlanner.scala:78)
	at scala.collection.TraversableOnce.$anonfun$foldLeft$1(TraversableOnce.scala:162)
	at scala.collection.TraversableOnce.$anonfun$foldLeft$1$adapted(TraversableOnce.scala:162)
	at scala.collection.Iterator.foreach(Iterator.scala:941)
	at scala.collection.Iterator.foreach$(Iterator.scala:941)
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
	at scala.collection.TraversableOnce.foldLeft(TraversableOnce.scala:162)
	at scala.collection.TraversableOnce.foldLeft$(TraversableOnce.scala:160)
	at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1429)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$2(QueryPlanner.scala:75)
	at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:484)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:490)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93)
	at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:68)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$3(QueryPlanner.scala:78)
	at scala.collection.TraversableOnce.$anonfun$foldLeft$1(TraversableOnce.scala:162)
	at scala.collection.TraversableOnce.$anonfun$foldLeft$1$adapted(TraversableOnce.scala:162)
	at scala.collection.Iterator.foreach(Iterator.scala:941)
	at scala.collection.Iterator.foreach$(Iterator.scala:941)
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
	at scala.collection.TraversableOnce.foldLeft(TraversableOnce.scala:162)
	at scala.collection.TraversableOnce.foldLeft$(TraversableOnce.scala:160)
	at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1429)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$2(QueryPlanner.scala:75)
	at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:484)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:490)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93)
	at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:68)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$3(QueryPlanner.scala:78)
	at scala.collection.TraversableOnce.$anonfun$foldLeft$1(TraversableOnce.scala:162)
	at scala.collection.TraversableOnce.$anonfun$foldLeft$1$adapted(TraversableOnce.scala:162)
	at scala.collection.Iterator.foreach(Iterator.scala:941)
	at scala.collection.Iterator.foreach$(Iterator.scala:941)
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
	at scala.collection.TraversableOnce.foldLeft(TraversableOnce.scala:162)
	at scala.collection.TraversableOnce.foldLeft$(TraversableOnce.scala:160)
	at scala.collection.AbstractIterator.foldLeft(Iterator.scala:1429)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.$anonfun$plan$2(QueryPlanner.scala:75)
	at scala.collection.Iterator$$anon$11.nextCur(Iterator.scala:484)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:490)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:93)
	at org.apache.spark.sql.execution.SparkStrategies.plan(SparkStrategies.scala:68)
	at org.apache.spark.sql.execution.QueryExecution$.createSparkPlan(QueryExecution.scala:317)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$sparkPlan$1(QueryExecution.scala:87)
	at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:120)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
	at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:120)
	at org.apache.spark.sql.execution.QueryExecution.sparkPlan$lzycompute(QueryExecution.scala:87)
	at org.apache.spark.sql.execution.QueryExecution.sparkPlan(QueryExecution.scala:85)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$executedPlan$1(QueryExecution.scala:95)
	at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
	at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:120)
	at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:764)
	at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:120)
	at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:95)
	at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:92)
	at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$14(MicroBatchExecution.scala:563)
	at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:352)
	at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:350)
	at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
	at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runBatch(MicroBatchExecution.scala:553)
	at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:223)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
	at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:352)
	at org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:350)
	at org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:68)
	at org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:191)
	at org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:57)
	at org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:185)
	at org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:333)
	... 1 more
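
Reading the causes bottom-up: micro-batch planning instantiates StreamingSymmetricHashJoinExec, whose constructor calls SparkContext.broadcast; because the suite's shared context had already been stopped (it was created in beforeAll, per the frames above), assertNotStopped throws, and StreamExecution.runStream rethrows it wrapped in the StreamingQueryException reported at the top. A sketch of how such a failure typically surfaces on the test side, assuming joined stands in for the suite's self right outer join stream (names are illustrative, not the suite's code):

    import org.apache.spark.sql.streaming.StreamingQueryException

    // joined is assumed to be a streaming DataFrame for the self join under test.
    val query = joined.writeStream
      .format("memory")
      .queryName("self_outer_join")
      .start()

    try {
      // processAllAvailable() rethrows a query failure as StreamingQueryException
      query.processAllAvailable()
    } catch {
      case e: StreamingQueryException =>
        // getCause is the root IllegalStateException from the stopped SparkContext
        println(e.getCause)
    }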