Commit 53d4abe9 authored by Bo Meng, committed by Reynold Xin

[SPARK-15537][SQL] fix dir delete issue

## What changes were proposed in this pull request?

Some of the test cases, e.g. `OrcSourceSuite`, create temp folders with temp files inside them, but the folders are not removed after the tests finish. If we keep running the test cases, this accumulates temp files and wastes disk space.

The reason is that `dir.delete()` does not work if the directory is not empty: we need to recursively delete the contents before deleting the folder itself.
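
For illustration, here is a minimal sketch of such a recursive delete in Scala. Spark already ships this as `Utils.deleteRecursively`; the standalone version below, including its error handling, is an assumption for illustration only:

```scala
import java.io.{File, IOException}

object RecursiveDelete {
  // File#delete() refuses to remove a non-empty directory, so delete
  // the children first, depth-first, then the directory itself.
  def deleteRecursively(file: File): Unit = {
    if (file.isDirectory) {
      // listFiles() returns null on I/O error, hence the Option wrapper.
      Option(file.listFiles()).getOrElse(Array.empty[File]).foreach(deleteRecursively)
    }
    if (!file.delete() && file.exists()) {
      throw new IOException(s"Failed to delete: ${file.getAbsolutePath}")
    }
  }
}
```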

## How was this patch tested?

Manually checked the temp folder to make sure the temp files were deleted.

Author: Bo Meng <mengbo@hotmail.com>

Closes #13304 from bomeng/SPARK-15537.
parent 361ebc28
OrcQuerySuite.scala:

```diff
@@ -53,12 +53,6 @@ case class Person(name: String, age: Int, contacts: Seq[Contact])

 class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {

-  def getTempFilePath(prefix: String, suffix: String = ""): File = {
-    val tempFile = File.createTempFile(prefix, suffix)
-    tempFile.delete()
-    tempFile
-  }
-
   test("Read/write All Types") {
     val data = (0 to 255).map { i =>
       (s"$i", i, i.toLong, i.toFloat, i.toDouble, i.toShort, i.toByte, i % 2 == 0)
...
```
OrcSourceSuite.scala:

```diff
@@ -25,6 +25,7 @@ import org.apache.spark.sql.{QueryTest, Row}
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.sources._
 import org.apache.spark.sql.types._
+import org.apache.spark.util.Utils

 case class OrcData(intField: Int, stringField: String)

@@ -37,14 +38,10 @@ abstract class OrcSuite extends QueryTest with TestHiveSingleton with BeforeAndA
   override def beforeAll(): Unit = {
     super.beforeAll()
-    orcTableAsDir = File.createTempFile("orctests", "sparksql")
-    orcTableAsDir.delete()
-    orcTableAsDir.mkdir()
+    orcTableAsDir = Utils.createTempDir("orctests", "sparksql")

     // Hack: to prepare orc data files using hive external tables
-    orcTableDir = File.createTempFile("orctests", "sparksql")
-    orcTableDir.delete()
-    orcTableDir.mkdir()
+    orcTableDir = Utils.createTempDir("orctests", "sparksql")

     import org.apache.spark.sql.hive.test.TestHive.implicits._

     sparkContext
@@ -68,15 +65,6 @@ abstract class OrcSuite extends QueryTest with TestHiveSingleton with BeforeAndA
       """.stripMargin)
   }

-  override def afterAll(): Unit = {
-    try {
-      orcTableDir.delete()
-      orcTableAsDir.delete()
-    } finally {
-      super.afterAll()
-    }
-  }
-
   test("create temporary orc table") {
     checkAnswer(sql("SELECT COUNT(*) FROM normal_orc_source"), Row(10))
...
```
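
Note: `Utils.createTempDir` not only creates the directory but also, in Spark's implementation, registers it for deletion at JVM shutdown, which is why the `afterAll()` cleanup above can be dropped entirely. A minimal usage sketch, assuming `org.apache.spark.util.Utils` is on the test classpath:

```scala
import org.apache.spark.util.Utils

// Creates a fresh temp directory and registers it for automatic
// deletion when the JVM exits; no manual cleanup is needed.
val orcTableAsDir = Utils.createTempDir("orctests", "sparksql")
```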