Skip to content
Snippets Groups Projects
Commit fbe40c58 authored by Vadim Chekan's avatar Vadim Chekan
Browse files

Serialize and restore spark.cleaner.ttl to checkpoint

parent a106ed8b
No related branches found
No related tags found
No related merge requests found
...@@ -26,6 +26,7 @@ import org.apache.hadoop.conf.Configuration ...@@ -26,6 +26,7 @@ import org.apache.hadoop.conf.Configuration
import org.apache.spark.Logging import org.apache.spark.Logging
import org.apache.spark.io.CompressionCodec import org.apache.spark.io.CompressionCodec
import org.apache.spark.util.MetadataCleaner
private[streaming] private[streaming]
...@@ -40,6 +41,7 @@ class Checkpoint(@transient ssc: StreamingContext, val checkpointTime: Time) ...@@ -40,6 +41,7 @@ class Checkpoint(@transient ssc: StreamingContext, val checkpointTime: Time)
val checkpointDir = ssc.checkpointDir val checkpointDir = ssc.checkpointDir
val checkpointDuration = ssc.checkpointDuration val checkpointDuration = ssc.checkpointDuration
val pendingTimes = ssc.scheduler.jobManager.getPendingTimes() val pendingTimes = ssc.scheduler.jobManager.getPendingTimes()
val delaySeconds = MetadataCleaner.getDelaySeconds
def validate() { def validate() {
assert(master != null, "Checkpoint.master is null") assert(master != null, "Checkpoint.master is null")
......
...@@ -100,6 +100,10 @@ class StreamingContext private ( ...@@ -100,6 +100,10 @@ class StreamingContext private (
"both SparkContext and checkpoint as null") "both SparkContext and checkpoint as null")
} }
if(cp_ != null && cp_.delaySeconds >= 0 && MetadataCleaner.getDelaySeconds < 0) {
MetadataCleaner.setDelaySeconds(cp_.delaySeconds)
}
if (MetadataCleaner.getDelaySeconds < 0) { if (MetadataCleaner.getDelaySeconds < 0) {
throw new SparkException("Spark Streaming cannot be used without setting spark.cleaner.ttl; " throw new SparkException("Spark Streaming cannot be used without setting spark.cleaner.ttl; "
+ "set this property before creating a SparkContext (use SPARK_JAVA_OPTS for the shell)") + "set this property before creating a SparkContext (use SPARK_JAVA_OPTS for the shell)")
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment