Commit fcb37502 authored by Reynold Xin

SPARK-1933: Throw a more meaningful exception when a directory is passed to addJar/addFile.

https://issues.apache.org/jira/browse/SPARK-1933



Author: Reynold Xin <rxin@apache.org>

Closes #888 from rxin/addfile and squashes the following commits:

8c402a3 [Reynold Xin] Updated comment.
ff6c162 [Reynold Xin] SPARK-1933: Throw a more meaningful exception when a directory is passed to addJar/addFile.

(cherry picked from commit 90e281b5)
Signed-off-by: Reynold Xin <rxin@apache.org>
parent 9bcd9992
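
For context, and not part of the commit itself: a minimal sketch of how the new check surfaces to user code, assuming a live SparkContext named sc and a hypothetical local directory path. Passing a directory to addFile (or to addJar on the non-YARN path shown in the second hunk below) now fails fast with the IllegalArgumentException added in HttpFileServer.addFileToDir, instead of surfacing later as the confusing Guava FileNotFoundException described in the diff.

import java.io.File

// Hypothetical path; sc is an already-constructed SparkContext.
val someDir = new File("/tmp/my-local-dir")

try {
  sc.addFile(someDir.getAbsolutePath)  // routed through HttpFileServer.addFileToDir
} catch {
  case e: IllegalArgumentException =>
    // With this commit the error names the real problem, roughly:
    //   java.lang.IllegalArgumentException: /tmp/my-local-dir cannot be a directory.
    println(e.getMessage)
}
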
@@ -59,6 +59,13 @@ private[spark] class HttpFileServer(securityManager: SecurityManager) extends Logging {
   }
 
   def addFileToDir(file: File, dir: File) : String = {
+    // Check whether the file is a directory. If it is, throw a more meaningful exception.
+    // If we don't catch this, Guava throws a very confusing error message:
+    //   java.io.FileNotFoundException: [file] (No such file or directory)
+    // even though the directory ([file]) exists.
+    if (file.isDirectory) {
+      throw new IllegalArgumentException(s"$file cannot be a directory.")
+    }
     Files.copy(file, new File(dir, file.getName))
     dir + "/" + file.getName
   }
@@ -794,7 +794,7 @@ class SparkContext(config: SparkConf) extends Logging {
     addedFiles(key) = System.currentTimeMillis
 
     // Fetch the file locally in case a job is executed using DAGScheduler.runLocally().
-    Utils.fetchFile(path, new File(SparkFiles.getRootDirectory), conf, env.securityManager)
+    Utils.fetchFile(path, new File(SparkFiles.getRootDirectory()), conf, env.securityManager)
 
     logInfo("Added file " + path + " at " + key + " with timestamp " + addedFiles(key))
     postEnvironmentUpdate()
@@ -932,13 +932,12 @@ class SparkContext(config: SparkConf) extends Logging {
               try {
                 env.httpFileServer.addJar(new File(fileName))
               } catch {
-                case e: Exception => {
+                case e: Exception =>
                   // For now just log an error but allow to go through so spark examples work.
                   // The spark examples don't really need the jar distributed since its also
                   // the app jar.
                   logError("Error adding jar (" + e + "), was the --addJars option used?")
                   null
-                }
               }
             } else {
               env.httpFileServer.addJar(new File(uri.getPath))
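
For reference, and also not part of the commit: a small self-contained sketch of the failure mode the new comment in HttpFileServer describes. Guava's Files.copy(File, File), which addFileToDir calls, expects a regular file, so handing it a directory fails deep inside with a low-level IOException that never says the real problem is "this is a directory". The temp directories below are created on the fly; the only assumption is that Guava is on the classpath, as it is for Spark.

import java.io.File
import com.google.common.io.Files  // Guava, the same utility HttpFileServer uses

// Stand-in directories created only for this demonstration.
val srcDir = java.nio.file.Files.createTempDirectory("spark-demo-src").toFile
val destDir = java.nio.file.Files.createTempDirectory("spark-demo-dest").toFile

try {
  // What addFileToDir effectively attempted before this commit when given a directory:
  Files.copy(srcDir, new File(destDir, srcDir.getName))
} catch {
  case e: java.io.IOException =>
    // Low-level and misleading: the path in the message exists; it just is not a regular file.
    println("Pre-existing failure mode: " + e)
}
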