Skip to content
Snippets Groups Projects
Unverified Commit 3ef183a9 authored by Shixiong Zhu's avatar Shixiong Zhu Committed by Sean Owen
Browse files

[SPARK-19113][SS][TESTS] Set UncaughtExceptionHandler in onQueryStarted to ensure catching fatal errors during query initialization

[SPARK-19113][SS][TESTS] Set UncaughtExceptionHandler in onQueryStarted to ensure catching fatal errors during query initialization

## What changes were proposed in this pull request?

StreamTest sets `UncaughtExceptionHandler` after starting the query now. It may not be able to catch fatal errors during query initialization. This PR uses `onQueryStarted` callback to fix it.

## How was this patch tested?

Jenkins

Author: Shixiong Zhu <shixiong@databricks.com>

Closes #16492 from zsxwing/SPARK-19113.
parent a2c6adcc
No related branches found
No related tags found
No related merge requests found
......@@ -238,7 +238,7 @@ class StreamSuite extends StreamTest {
}
}
testQuietly("fatal errors from a source should be sent to the user") {
testQuietly("handle fatal errors thrown from the stream thread") {
for (e <- Seq(
new VirtualMachineError {},
new ThreadDeath,
......@@ -259,8 +259,11 @@ class StreamSuite extends StreamTest {
override def stop(): Unit = {}
}
val df = Dataset[Int](sqlContext.sparkSession, StreamingExecutionRelation(source))
// These errors are fatal and should be ignored in `testStream` so they do not fail the test.
testStream(df)(
// `ExpectFailure(isFatalError = true)` verifies two things:
// - Fatal errors can be propagated to `StreamingQuery.exception` and
//   `StreamingQuery.awaitTermination` just like non-fatal errors.
// - Fatal errors can be caught by UncaughtExceptionHandler.
ExpectFailure(isFatalError = true)(ClassTag(e.getClass))
)
}
......
......@@ -235,7 +235,10 @@ trait StreamTest extends QueryTest with SharedSQLContext with Timeouts {
*/
def testStream(
_stream: Dataset[_],
outputMode: OutputMode = OutputMode.Append)(actions: StreamAction*): Unit = {
outputMode: OutputMode = OutputMode.Append)(actions: StreamAction*): Unit = synchronized {
// `synchronized` is added to prevent the user from calling multiple `testStream`s concurrently
// because this method assumes there is only one active query in its `StreamingQueryListener`
// and it may not work correctly when multiple `testStream`s run concurrently.
val stream = _stream.toDF()
val sparkSession = stream.sparkSession // use the session in DF, not the default session
......@@ -248,6 +251,22 @@ trait StreamTest extends QueryTest with SharedSQLContext with Timeouts {
@volatile
var streamThreadDeathCause: Throwable = null
// Set UncaughtExceptionHandler in `onQueryStarted` so that we can ensure catching fatal errors
// during query initialization.
// Listener that installs an UncaughtExceptionHandler on the stream's own thread
// as soon as the query starts. NOTE(review): this presumably relies on
// `onQueryStarted` being invoked on the stream execution thread itself
// (`Thread.currentThread` below must be that thread) — confirm against the
// StreamingQueryListener contract. Any fatal error escaping the stream thread,
// including failures during query initialization, is recorded in
// `streamThreadDeathCause` for later inspection by the test harness.
val listener = new StreamingQueryListener {
override def onQueryStarted(event: QueryStartedEvent): Unit = {
// Note: this assumes there is only one query active in the `testStream` method,
// since a second concurrent query would overwrite the same handler/field.
Thread.currentThread.setUncaughtExceptionHandler(new UncaughtExceptionHandler {
override def uncaughtException(t: Thread, e: Throwable): Unit = {
// Capture the cause only; the @volatile field makes it visible to the test thread.
streamThreadDeathCause = e
}
})
}
// Progress and termination events are irrelevant for error capture — no-ops.
override def onQueryProgress(event: QueryProgressEvent): Unit = {}
override def onQueryTerminated(event: QueryTerminatedEvent): Unit = {}
}
sparkSession.streams.addListener(listener)
// If the test doesn't manually start the stream, we do it automatically at the beginning.
val startedManually =
......@@ -364,12 +383,6 @@ trait StreamTest extends QueryTest with SharedSQLContext with Timeouts {
triggerClock = triggerClock)
.asInstanceOf[StreamingQueryWrapper]
.streamingQuery
currentStream.microBatchThread.setUncaughtExceptionHandler(
new UncaughtExceptionHandler {
override def uncaughtException(t: Thread, e: Throwable): Unit = {
streamThreadDeathCause = e
}
})
// Wait until the initialization finishes, because some tests need to use `logicalPlan`
// after starting the query.
currentStream.awaitInitialization(streamingTimeout.toMillis)
......@@ -545,6 +558,7 @@ trait StreamTest extends QueryTest with SharedSQLContext with Timeouts {
case (key, Some(value)) => sparkSession.conf.set(key, value)
case (key, None) => sparkSession.conf.unset(key)
}
sparkSession.streams.removeListener(listener)
}
}
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment