From 53d4abe9e996e53c1bdcd5ac4cb8cbf08b9ec8b5 Mon Sep 17 00:00:00 2001
From: Bo Meng <mengbo@hotmail.com>
Date: Thu, 26 May 2016 00:22:47 -0700
Subject: [PATCH] [SPARK-15537][SQL] fix dir delete issue

## What changes were proposed in this pull request?

Some of the test suites, e.g. `OrcSourceSuite`, create temp folders with temp files inside them, but the folders are not removed after the tests finish. Repeatedly running the test cases therefore leaves behind a growing number of temp files and wastes disk space.

The reason is that `dir.delete()` does not work when the directory is not empty; its contents must be deleted recursively before the folder itself can be removed.
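
For illustration only (not part of the patch), a recursive delete in plain Scala could look like the sketch below; `deleteRecursively` is a hypothetical helper name. The patch itself instead switches the suites to Spark's `Utils.createTempDir`, which is expected to register the created directory for automatic cleanup on JVM shutdown, so no manual `afterAll()` deletion is needed.

```scala
import java.io.File

// Sketch: File.delete() fails on a non-empty directory, so remove the
// children first, then the directory itself.
def deleteRecursively(file: File): Unit = {
  if (file.isDirectory) {
    // listFiles() can return null on I/O errors; treat that as "no children".
    Option(file.listFiles()).getOrElse(Array.empty[File]).foreach(deleteRecursively)
  }
  file.delete()
}
```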

## How was this patch tested?

Manually checked the temp folder to make sure the temp files were deleted.

Author: Bo Meng <mengbo@hotmail.com>

Closes #13304 from bomeng/SPARK-15537.
---
 .../spark/sql/hive/orc/OrcQuerySuite.scala     |  6 ------
 .../spark/sql/hive/orc/OrcSourceSuite.scala    | 18 +++---------------
 2 files changed, 3 insertions(+), 21 deletions(-)

diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
index f83b3a3de2..9771b2314a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
@@ -53,12 +53,6 @@ case class Person(name: String, age: Int, contacts: Seq[Contact])
 
 class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
 
-  def getTempFilePath(prefix: String, suffix: String = ""): File = {
-    val tempFile = File.createTempFile(prefix, suffix)
-    tempFile.delete()
-    tempFile
-  }
-
   test("Read/write All Types") {
     val data = (0 to 255).map { i =>
       (s"$i", i, i.toLong, i.toFloat, i.toDouble, i.toShort, i.toByte, i % 2 == 0)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
index 6081d86f44..4cac334859 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
@@ -25,6 +25,7 @@ import org.apache.spark.sql.{QueryTest, Row}
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.sources._
 import org.apache.spark.sql.types._
+import org.apache.spark.util.Utils
 
 case class OrcData(intField: Int, stringField: String)
 
@@ -37,14 +38,10 @@ abstract class OrcSuite extends QueryTest with TestHiveSingleton with BeforeAndA
   override def beforeAll(): Unit = {
     super.beforeAll()
 
-    orcTableAsDir = File.createTempFile("orctests", "sparksql")
-    orcTableAsDir.delete()
-    orcTableAsDir.mkdir()
+    orcTableAsDir = Utils.createTempDir("orctests", "sparksql")
 
     // Hack: to prepare orc data files using hive external tables
-    orcTableDir = File.createTempFile("orctests", "sparksql")
-    orcTableDir.delete()
-    orcTableDir.mkdir()
+    orcTableDir = Utils.createTempDir("orctests", "sparksql")
     import org.apache.spark.sql.hive.test.TestHive.implicits._
 
     sparkContext
@@ -68,15 +65,6 @@ abstract class OrcSuite extends QueryTest with TestHiveSingleton with BeforeAndA
        """.stripMargin)
   }
 
-  override def afterAll(): Unit = {
-    try {
-      orcTableDir.delete()
-      orcTableAsDir.delete()
-    } finally {
-      super.afterAll()
-    }
-  }
-
   test("create temporary orc table") {
     checkAnswer(sql("SELECT COUNT(*) FROM normal_orc_source"), Row(10))
 
-- 
GitLab