Skip to content
Snippets Groups Projects
Commit 600e9972 authored by Matei Zaharia's avatar Matei Zaharia
Browse files

Fix a bug where an input path was added to a Hadoop job configuration twice

parent 628bb5ca
No related branches found
No related tags found
No related merge requests found
......@@ -182,15 +182,12 @@ class SparkContext(
/**
 * Get an RDD for a Hadoop file with an arbitrary new API InputFormat.
 *
 * The InputFormat, key, and value classes are derived from the implicit
 * ClassManifests, so callers only need to supply the path and the type
 * parameters.
 *
 * @param path input path for the Hadoop file
 * @return an RDD of (key, value) pairs read from the file
 */
def newAPIHadoopFile[K, V, F <: NewInputFormat[K, V]](path: String)
    (implicit km: ClassManifest[K], vm: ClassManifest[V], fm: ClassManifest[F]): RDD[(K, V)] = {
  // Pass a fresh Configuration rather than a NewHadoopJob conf that already
  // had addInputPath called on it: the overload below adds `path` itself, so
  // pre-adding it here registered the input path twice.
  newAPIHadoopFile(
    path,
    fm.erasure.asInstanceOf[Class[F]],
    km.erasure.asInstanceOf[Class[K]],
    vm.erasure.asInstanceOf[Class[V]],
    new Configuration)
}
/**
......
0% loaded — Loading or refresh the page.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment