Remove redundant escape characters, remove unrelated changes #331

GitHub Actions / Report test results failed Nov 20, 2023 in 0s

46167 tests run, 868 skipped, 1 failed.

Annotations

Check failure on line 1 in RocksDBStateStoreStreamingAggregationSuite

RocksDBStateStoreStreamingAggregationSuite.changing schema of state when restarting query - state format version 1 (RocksDBStateStore)
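
The suite runs the streaming aggregation tests against the RocksDB state store with state format version 1. A minimal, illustrative sketch of how such a session can be configured (the conf keys are the standard Spark SQL ones; the master, app name, and values here are only examples, not the suite's actual setup):

    import org.apache.spark.sql.SparkSession

    // Illustrative configuration only: pick the RocksDB state store provider and
    // pin the streaming-aggregation state format to version 1, as the test name implies.
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("rocksdb-state-format-v1-sketch")
      .config("spark.sql.streaming.stateStore.providerClass",
        "org.apache.spark.sql.execution.streaming.state.RocksDBStateStoreProvider")
      .config("spark.sql.streaming.aggregation.stateFormatVersion", "1")
      .getOrCreate()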

org.scalatest.exceptions.TestFailedException: 
Timed out waiting for stream: The code passed to failAfter did not complete within 120 seconds.
java.base/java.lang.Thread.getStackTrace(Thread.java:1619)
	org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:277)
	org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
	org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
	org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
	org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7(StreamTest.scala:481)
	org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7$adapted(StreamTest.scala:480)
	scala.collection.mutable.HashMap$Node.foreach(HashMap.scala:642)
	scala.collection.mutable.HashMap.foreach(HashMap.scala:504)
	org.apache.spark.sql.streaming.StreamTest.fetchStreamAnswer$1(StreamTest.scala:480)

	Caused by: 	null
	java.base/java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:1764)
		org.apache.spark.sql.execution.streaming.StreamExecution.awaitOffset(StreamExecution.scala:481)
		org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$8(StreamTest.scala:482)
		scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
		org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
		org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:282)
		org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
		org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
		org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
		org.apache.spark.sql.streaming.StreamTest.$anonfun$testStream$7(StreamTest.scala:481)


== Progress ==
   StartStream(ProcessingTimeTrigger(0),org.apache.spark.util.SystemClock@4587dfa5,Map(),/home/runner/work/spark/spark/target/tmp/spark-66af3ef5-c6f3-4baf-baad-8c55b424d347)
   AddData to MemoryStream[value#18410]: 1,11
=> CheckLastBatch: [1,12,6.0,11]
   StopStream
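
Read as StreamTest actions, the progress above corresponds roughly to the sketch below. The query shape, the input (1, 11), and the expected row [1,12,6.0,11] are taken from this log; the surrounding code is a hypothetical reconstruction, not the suite's actual source. It assumes a suite that mixes in org.apache.spark.sql.streaming.StreamTest and imports testImplicits._, and checkpointDir stands in for the temp dir created by withTempDir:

    import org.apache.spark.sql.execution.streaming.MemoryStream
    import org.apache.spark.sql.functions.{avg, max, sum}
    import org.apache.spark.sql.streaming.{OutputMode, Trigger}

    val inputData = MemoryStream[Int]
    val aggregated = inputData.toDF()
      .selectExpr("value % 10 AS id", "value")
      .groupBy("id")
      .agg(
        sum("value").as("sum_value"),
        avg("value").as("avg_value"),
        max("value").as("max_value"))

    testStream(aggregated, OutputMode.Update())(
      StartStream(Trigger.ProcessingTime(0), checkpointLocation = checkpointDir.getAbsolutePath),
      AddData(inputData, 1, 11),
      CheckLastBatch((1, 12L, 6.0, 11)), // for id = 1: sum = 12, avg = 6.0, max = 11; the step that times out here
      StopStream
    )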

== Stream ==
Output Mode: Update
Stream state: {}
Thread state: alive
Thread stack trace: java.base/jdk.internal.misc.Unsafe.park(Native Method)
java.base/java.util.concurrent.locks.LockSupport.park(LockSupport.java:211)
java.base/java.util.concurrent.locks.AbstractQueuedSynchronizer.acquire(AbstractQueuedSynchronizer.java:715)
java.base/java.util.concurrent.locks.AbstractQueuedSynchronizer.acquireSharedInterruptibly(AbstractQueuedSynchronizer.java:1047)
app//scala.concurrent.impl.Promise$DefaultPromise.tryAwait0(Promise.scala:243)
app//scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:255)
app//scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:104)
app//org.apache.spark.util.ThreadUtils$.awaitReady(ThreadUtils.scala:342)
app//org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:975)
app//org.apache.spark.SparkContext.runJob(SparkContext.scala:2428)
app//org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2(WriteToDataSourceV2Exec.scala:386)
app//org.apache.spark.sql.execution.datasources.v2.V2TableWriteExec.writeWithV2$(WriteToDataSourceV2Exec.scala:360)
app//org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.writeWithV2(WriteToDataSourceV2Exec.scala:308)
app//org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec.run(WriteToDataSourceV2Exec.scala:319)
app//org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)
app//org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)
app//org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)
app//org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:4402)
app//org.apache.spark.sql.Dataset.$anonfun$collect$1(Dataset.scala:3634)
app//org.apache.spark.sql.Dataset$$Lambda$2304/0x00007f07e92df830.apply(Unknown Source)
app//org.apache.spark.sql.Dataset.$anonfun$withAction$2(Dataset.scala:4392)
app//org.apache.spark.sql.Dataset$$Lambda$2316/0x00007f07e92e42d8.apply(Unknown Source)
app//org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:557)
app//org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:4390)
app//org.apache.spark.sql.Dataset$$Lambda$2305/0x00007f07e92dd000.apply(Unknown Source)
app//org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId0$6(SQLExecution.scala:150)
app//org.apache.spark.sql.execution.SQLExecution$$$Lambda$2260/0x00007f07e92c2780.apply(Unknown Source)
app//org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:241)
app//org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId0$1(SQLExecution.scala:116)
app//org.apache.spark.sql.execution.SQLExecution$$$Lambda$2246/0x00007f07e92bf578.apply(Unknown Source)
app//org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:907)
app//org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId0(SQLExecution.scala:72)
app//org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:196)
app//org.apache.spark.sql.Dataset.withAction(Dataset.scala:4390)
app//org.apache.spark.sql.Dataset.collect(Dataset.scala:3634)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$17(MicroBatchExecution.scala:783)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$2245/0x00007f07e92bf2b8.apply(Unknown Source)
app//org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId0$6(SQLExecution.scala:150)
app//org.apache.spark.sql.execution.SQLExecution$$$Lambda$2260/0x00007f07e92c2780.apply(Unknown Source)
app//org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:241)
app//org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId0$1(SQLExecution.scala:116)
app//org.apache.spark.sql.execution.SQLExecution$$$Lambda$2246/0x00007f07e92bf578.apply(Unknown Source)
app//org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:907)
app//org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId0(SQLExecution.scala:72)
app//org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:196)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runBatch$16(MicroBatchExecution.scala:771)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$2243/0x00007f07e92bea68.apply(Unknown Source)
app//org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:427)
app//org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:425)
app//org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:67)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.runBatch(MicroBatchExecution.scala:771)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$2(MicroBatchExecution.scala:326)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$1945/0x00007f07e91e1918.apply$mcV$sp(Unknown Source)
app//scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
app//org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken(ProgressReporter.scala:427)
app//org.apache.spark.sql.execution.streaming.ProgressReporter.reportTimeTaken$(ProgressReporter.scala:425)
app//org.apache.spark.sql.execution.streaming.StreamExecution.reportTimeTaken(StreamExecution.scala:67)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.$anonfun$runActivatedStream$1(MicroBatchExecution.scala:289)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution$$Lambda$1942/0x00007f07e91e0890.apply$mcZ$sp(Unknown Source)
app//org.apache.spark.sql.execution.streaming.ProcessingTimeExecutor.execute(TriggerExecutor.scala:67)
app//org.apache.spark.sql.execution.streaming.MicroBatchExecution.runActivatedStream(MicroBatchExecution.scala:279)
app//org.apache.spark.sql.execution.streaming.StreamExecution.$anonfun$runStream$1(StreamExecution.scala:311)
app//org.apache.spark.sql.execution.streaming.StreamExecution$$Lambda$1933/0x00007f07e91d8578.apply$mcV$sp(Unknown Source)
app//scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
app//org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:907)
app//org.apache.spark.sql.execution.streaming.StreamExecution.org$apache$spark$sql$execution$streaming$StreamExecution$$runStream(StreamExecution.scala:289)
app//org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.$anonfun$run$1(StreamExecution.scala:211)
app//org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1$$Lambda$1929/0x00007f07e91d6dd0.apply$mcV$sp(Unknown Source)
app//scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
app//org.apache.spark.JobArtifactSet$.withActiveJobArtifactState(JobArtifactSet.scala:94)
app//org.apache.spark.sql.execution.streaming.StreamExecution$$anon$1.run(StreamExecution.scala:211)


== Sink ==



== Plan ==
== Parsed Logical Plan ==
WriteToMicroBatchDataSource MemorySink, 0430d473-964d-4315-8149-df7d344ffeeb, Update, 0
+- Aggregate [id#18412], [id#18412, sum(value#18410) AS sum_value#18417L, avg(value#18410) AS avg_value#18418, max(value#18410) AS max_value#18419]
   +- Project [(value#18410 % 10) AS id#18412, value#18410]
      +- StreamingDataSourceV2Relation [value#18410], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@32c496bd, MemoryStream[value#18410], -1, 0

== Analyzed Logical Plan ==
WriteToMicroBatchDataSource MemorySink, 0430d473-964d-4315-8149-df7d344ffeeb, Update, 0
+- Aggregate [id#18412], [id#18412, sum(value#18410) AS sum_value#18417L, avg(value#18410) AS avg_value#18418, max(value#18410) AS max_value#18419]
   +- Project [(value#18410 % 10) AS id#18412, value#18410]
      +- StreamingDataSourceV2Relation [value#18410], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@32c496bd, MemoryStream[value#18410], -1, 0

== Optimized Logical Plan ==
WriteToDataSourceV2 MicroBatchWrite[epoch: 0, writer: org.apache.spark.sql.execution.streaming.sources.MemoryStreamingWrite@7e577f2d]
+- Aggregate [id#18412], [id#18412, sum(value#18410) AS sum_value#18417L, avg(value#18410) AS avg_value#18418, max(value#18410) AS max_value#18419]
   +- Project [(value#18410 % 10) AS id#18412, value#18410]
      +- StreamingDataSourceV2Relation [value#18410], org.apache.spark.sql.execution.streaming.MemoryStreamScanBuilder@32c496bd, MemoryStream[value#18410], -1, 0

== Physical Plan ==
WriteToDataSourceV2 MicroBatchWrite[epoch: 0, writer: org.apache.spark.sql.execution.streaming.sources.MemoryStreamingWrite@7e577f2d], org.apache.spark.sql.execution.datasources.v2.DataSourceV2Strategy$$Lambda$2189/0x00007f07e928d350@4d9d0ee4
+- *(4) HashAggregate(keys=[id#18412], functions=[sum(value#18410), avg(value#18410), max(value#18410)], output=[id#18412, sum_value#18417L, avg_value#18418, max_value#18419])
   +- StateStoreSave [id#18412], state info [ checkpoint = file:/home/runner/work/spark/spark/target/tmp/spark-66af3ef5-c6f3-4baf-baad-8c55b424d347/state, runId = 251da6d0-95d0-48cc-99e3-5c08eb05de94, opId = 0, ver = 0, numPartitions = 1], Update, 0, 0, 1
      +- *(3) HashAggregate(keys=[id#18412], functions=[merge_sum(value#18410), merge_avg(value#18410), merge_max(value#18410)], output=[id#18412, sum#18449L, sum#18452, count#18453L, max#18455])
         +- StateStoreRestore [id#18412], state info [ checkpoint = file:/home/runner/work/spark/spark/target/tmp/spark-66af3ef5-c6f3-4baf-baad-8c55b424d347/state, runId = 251da6d0-95d0-48cc-99e3-5c08eb05de94, opId = 0, ver = 0, numPartitions = 1], 1
            +- *(2) HashAggregate(keys=[id#18412], functions=[merge_sum(value#18410), merge_avg(value#18410), merge_max(value#18410)], output=[id#18412, sum#18449L, sum#18452, count#18453L, max#18455])
               +- Exchange hashpartitioning(id#18412, 1), ENSURE_REQUIREMENTS, [plan_id=85845]
                  +- *(1) HashAggregate(keys=[id#18412], functions=[partial_sum(value#18410), partial_avg(value#18410), partial_max(value#18410)], output=[id#18412, sum#18449L, sum#18452, count#18453L, max#18455])
                     +- *(1) Project [(value#18410 % 10) AS id#18412, value#18410]
                        +- MicroBatchScan[value#18410] MemoryStreamDataSource
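
For context on the physical plan above: the single state partition (numPartitions = 1 in the state info, and hashpartitioning(id#18412, 1) in the Exchange) follows the shuffle partition count at the first run of the query, and the MicroBatchWrite targets an in-memory sink in Update mode. A hedged sketch of starting the same query outside the test harness, reusing aggregated and checkpointDir from the sketch above (the sink query name is made up):

    // Illustrative only: one shuffle partition yields one state store partition per stateful operator.
    spark.conf.set("spark.sql.shuffle.partitions", "1")

    val query = aggregated.writeStream
      .outputMode("update")
      .format("memory")
      .queryName("agg_sketch")  // hypothetical in-memory sink name
      .option("checkpointLocation", checkpointDir.getAbsolutePath)
      .start()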
Raw output
sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException: 
	at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:472)
	at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:471)
	at org.scalatest.funsuite.AnyFunSuite.newAssertionFailedException(AnyFunSuite.scala:1564)
	at org.scalatest.Assertions.fail(Assertions.scala:933)
	at org.scalatest.Assertions.fail$(Assertions.scala:929)
	at org.scalatest.funsuite.AnyFunSuite.fail(AnyFunSuite.scala:1564)
	at org.apache.spark.sql.streaming.StreamTest.failTest$1(StreamTest.scala:462)
	at org.apache.spark.sql.streaming.StreamTest.liftedTree1$1(StreamTest.scala:800)
	at org.apache.spark.sql.streaming.StreamTest.testStream(StreamTest.scala:776)
	at org.apache.spark.sql.streaming.StreamTest.testStream$(StreamTest.scala:342)
	at org.apache.spark.sql.streaming.StreamingAggregationSuite.testStream(StreamingAggregationSuite.scala:55)
	at org.apache.spark.sql.streaming.StreamingAggregationSuite.prepareTestForChangingSchemaOfState(StreamingAggregationSuite.scala:892)
	at org.apache.spark.sql.streaming.StreamingAggregationSuite.$anonfun$new$81(StreamingAggregationSuite.scala:777)
	at org.apache.spark.sql.streaming.StreamingAggregationSuite.$anonfun$new$81$adapted(StreamingAggregationSuite.scala:776)
	at org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1(SQLTestUtils.scala:80)
	at org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1$adapted(SQLTestUtils.scala:79)
	at org.apache.spark.SparkFunSuite.withTempDir(SparkFunSuite.scala:245)
	at org.apache.spark.sql.streaming.StreamingAggregationSuite.org$apache$spark$sql$test$SQLTestUtils$$super$withTempDir(StreamingAggregationSuite.scala:55)
	at org.apache.spark.sql.test.SQLTestUtils.withTempDir(SQLTestUtils.scala:79)
	at org.apache.spark.sql.test.SQLTestUtils.withTempDir$(SQLTestUtils.scala:78)
	at org.apache.spark.sql.streaming.StreamingAggregationSuite.withTempDir(StreamingAggregationSuite.scala:55)
	at org.apache.spark.sql.streaming.StreamingAggregationSuite.$anonfun$new$80(StreamingAggregationSuite.scala:776)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
	at org.apache.spark.sql.streaming.StreamingAggregationSuite.$anonfun$executeFuncWithStateVersionSQLConf$1(StreamingAggregationSuite.scala:65)
	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:54)
	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:38)
	at org.apache.spark.sql.streaming.StreamingAggregationSuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(StreamingAggregationSuite.scala:55)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:248)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:246)
	at org.apache.spark.sql.streaming.StreamingAggregationSuite.withSQLConf(StreamingAggregationSuite.scala:55)
	at org.apache.spark.sql.streaming.StreamingAggregationSuite.executeFuncWithStateVersionSQLConf(StreamingAggregationSuite.scala:65)
	at org.apache.spark.sql.streaming.StreamingAggregationSuite.$anonfun$testQuietlyWithAllStateVersions$2(StreamingAggregationSuite.scala:82)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
	at org.apache.spark.sql.catalyst.util.package$.quietly(package.scala:42)
	at org.apache.spark.sql.test.SQLTestUtils.$anonfun$testQuietly$1(SQLTestUtils.scala:116)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
	at org.apache.spark.sql.streaming.RocksDBStateStoreTest.$anonfun$test$2(RocksDBStateStoreTest.scala:39)
	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:54)
	at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:38)
	at org.apache.spark.sql.streaming.StreamingAggregationSuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(StreamingAggregationSuite.scala:55)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:248)
	at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:246)
	at org.apache.spark.sql.streaming.StreamingAggregationSuite.withSQLConf(StreamingAggregationSuite.scala:55)
	at org.apache.spark.sql.streaming.RocksDBStateStoreTest.$anonfun$test$1(RocksDBStateStoreTest.scala:39)
	at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
	at org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
	at org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:282)
	at org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
	at org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
	at org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
	at org.apache.spark.SparkFunSuite.$anonfun$test$2(SparkFunSuite.scala:155)
	at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
	at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
	at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
	at org.scalatest.Transformer.apply(Transformer.scala:22)
	at org.scalatest.Transformer.apply(Transformer.scala:20)
	at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
	at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:227)
	at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
	at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
	at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
	at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
	at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:69)
	at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
	at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
	at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:69)
	at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
	at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
	at scala.collection.immutable.List.foreach(List.scala:333)
	at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
	at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
	at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
	at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
	at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
	at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564)
	at org.scalatest.Suite.run(Suite.scala:1114)
	at org.scalatest.Suite.run$(Suite.scala:1096)
	at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564)
	at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
	at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
	at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
	at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
	at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:69)
	at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
	at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
	at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
	at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:69)
	at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321)
	at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517)
	at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:414)
	at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
	at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
	at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
	at java.base/java.lang.Thread.run(Thread.java:840)