Skip to content
Snippets Groups Projects
Commit 7e1c97fc authored by Matei Zaharia's avatar Matei Zaharia
Browse files

Merge branch 'master' into mesos-0.9

parents 6888bc71 04827679
No related branches found
No related tags found
No related merge requests found
...@@ -12,8 +12,8 @@ import java.io.IOException ...@@ -12,8 +12,8 @@ import java.io.IOException
import java.net.URI import java.net.URI
import java.util.Date import java.util.Date
import spark.SerializableWritable
import spark.Logging import spark.Logging
import spark.SerializableWritable
/** /**
* Saves an RDD using a Hadoop OutputFormat as specified by a JobConf. The JobConf should also * Saves an RDD using a Hadoop OutputFormat as specified by a JobConf. The JobConf should also
...@@ -84,26 +84,23 @@ class HadoopWriter(@transient jobConf: JobConf) extends Logging with Serializabl ...@@ -84,26 +84,23 @@ class HadoopWriter(@transient jobConf: JobConf) extends Logging with Serializabl
writer.close(Reporter.NULL) writer.close(Reporter.NULL)
} }
def commit(): Boolean = { def commit() {
var result = false
val taCtxt = getTaskContext() val taCtxt = getTaskContext()
val cmtr = getOutputCommitter() val cmtr = getOutputCommitter()
if (cmtr.needsTaskCommit(taCtxt)) { if (cmtr.needsTaskCommit(taCtxt)) {
try { try {
cmtr.commitTask(taCtxt) cmtr.commitTask(taCtxt)
logInfo (taID + ": Committed") logInfo (taID + ": Committed")
result = true
} catch { } catch {
case e:IOException => { case e: IOException => {
logError ("Error committing the output of task: " + taID.value) logError("Error committing the output of task: " + taID.value, e)
e.printStackTrace()
cmtr.abortTask(taCtxt) cmtr.abortTask(taCtxt)
throw e
} }
} }
return result } else {
} logWarning ("No need to commit output of task: " + taID.value)
logWarning ("No need to commit output of task: " + taID.value) }
return true
} }
def cleanup() { def cleanup() {
......
...@@ -335,7 +335,7 @@ class PairRDDFunctions[K: ClassManifest, V: ClassManifest]( ...@@ -335,7 +335,7 @@ class PairRDDFunctions[K: ClassManifest, V: ClassManifest](
val writer = new HadoopWriter(conf) val writer = new HadoopWriter(conf)
writer.preSetup() writer.preSetup()
def writeToFile(context: TaskContext, iter: Iterator[(K,V)]): HadoopWriter = { def writeToFile(context: TaskContext, iter: Iterator[(K,V)]) {
writer.setup(context.stageId, context.splitId, context.attemptId) writer.setup(context.stageId, context.splitId, context.attemptId)
writer.open() writer.open()
...@@ -347,10 +347,10 @@ class PairRDDFunctions[K: ClassManifest, V: ClassManifest]( ...@@ -347,10 +347,10 @@ class PairRDDFunctions[K: ClassManifest, V: ClassManifest](
} }
writer.close() writer.close()
return writer writer.commit()
} }
self.context.runJob(self, writeToFile _ ).foreach(_.commit()) self.context.runJob(self, writeToFile _)
writer.cleanup() writer.cleanup()
} }
......
0% — Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment