spark-commits mailing list archives

From r...@apache.org
Subject spark git commit: [SPARK-15537][SQL] fix dir delete issue
Date Thu, 26 May 2016 07:22:50 GMT
Repository: spark
Updated Branches:
  refs/heads/master 361ebc282 -> 53d4abe9e


[SPARK-15537][SQL] fix dir delete issue

## What changes were proposed in this pull request?

Some of the test cases, e.g. `OrcSourceSuite`, create temp folders with temp files inside them,
but the folders are not removed after the tests finish. If we keep running the test cases,
this leaves more and more temp files behind and wastes disk space.

The reason is that `dir.delete()` does not work when the directory is not empty; we need to
recursively delete the contents before deleting the folder itself.
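
For illustration only (this sketch is not part of the patch, and the object/method names here are
made up), a recursive delete works along these lines:

```scala
import java.io.File

object RecursiveDelete {
  // File#delete() fails on a non-empty directory, so remove the children
  // first, then the directory itself.
  def deleteRecursively(file: File): Unit = {
    if (file.isDirectory) {
      Option(file.listFiles()).getOrElse(Array.empty[File]).foreach(deleteRecursively)
    }
    file.delete()
  }
}
```

The patch itself does not hand-roll this; it switches the suites to `Utils.createTempDir` (see the
diff below), so the test code no longer has to manage deletion explicitly.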

## How was this patch tested?

Manually checked the temp folder to make sure the temp files were deleted.
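
For a rough idea of what checking the temp folder amounts to (illustrative only, not part of the
patch), one can list entries under the JVM temp directory that match the `orctests` prefix the old
code passed to `File.createTempFile`:

```scala
import java.io.File

// Illustrative only: the pre-patch code created entries named
// "orctests...sparksql" under java.io.tmpdir, so any leftovers show up here.
val tmpDir = new File(System.getProperty("java.io.tmpdir"))
val leftovers = Option(tmpDir.listFiles()).getOrElse(Array.empty[File])
  .filter(_.getName.startsWith("orctests"))
leftovers.foreach(f => println(s"leftover temp entry: $f"))
```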

Author: Bo Meng <mengbo@hotmail.com>

Closes #13304 from bomeng/SPARK-15537.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/53d4abe9
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/53d4abe9
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/53d4abe9

Branch: refs/heads/master
Commit: 53d4abe9e996e53c1bdcd5ac4cb8cbf08b9ec8b5
Parents: 361ebc2
Author: Bo Meng <mengbo@hotmail.com>
Authored: Thu May 26 00:22:47 2016 -0700
Committer: Reynold Xin <rxin@databricks.com>
Committed: Thu May 26 00:22:47 2016 -0700

----------------------------------------------------------------------
 .../apache/spark/sql/hive/orc/OrcQuerySuite.scala |  6 ------
 .../spark/sql/hive/orc/OrcSourceSuite.scala       | 18 +++---------------
 2 files changed, 3 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/53d4abe9/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
index f83b3a3..9771b23 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
@@ -53,12 +53,6 @@ case class Person(name: String, age: Int, contacts: Seq[Contact])
 
 class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
 
-  def getTempFilePath(prefix: String, suffix: String = ""): File = {
-    val tempFile = File.createTempFile(prefix, suffix)
-    tempFile.delete()
-    tempFile
-  }
-
   test("Read/write All Types") {
     val data = (0 to 255).map { i =>
       (s"$i", i, i.toLong, i.toFloat, i.toDouble, i.toShort, i.toByte, i % 2 == 0)

http://git-wip-us.apache.org/repos/asf/spark/blob/53d4abe9/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
index 6081d86..4cac334 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
@@ -25,6 +25,7 @@ import org.apache.spark.sql.{QueryTest, Row}
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.sources._
 import org.apache.spark.sql.types._
+import org.apache.spark.util.Utils
 
 case class OrcData(intField: Int, stringField: String)
 
@@ -37,14 +38,10 @@ abstract class OrcSuite extends QueryTest with TestHiveSingleton with BeforeAndA
   override def beforeAll(): Unit = {
     super.beforeAll()
 
-    orcTableAsDir = File.createTempFile("orctests", "sparksql")
-    orcTableAsDir.delete()
-    orcTableAsDir.mkdir()
+    orcTableAsDir = Utils.createTempDir("orctests", "sparksql")
 
     // Hack: to prepare orc data files using hive external tables
-    orcTableDir = File.createTempFile("orctests", "sparksql")
-    orcTableDir.delete()
-    orcTableDir.mkdir()
+    orcTableDir = Utils.createTempDir("orctests", "sparksql")
     import org.apache.spark.sql.hive.test.TestHive.implicits._
 
     sparkContext
@@ -68,15 +65,6 @@ abstract class OrcSuite extends QueryTest with TestHiveSingleton with BeforeAndA
        """.stripMargin)
   }
 
-  override def afterAll(): Unit = {
-    try {
-      orcTableDir.delete()
-      orcTableAsDir.delete()
-    } finally {
-      super.afterAll()
-    }
-  }
-
   test("create temporary orc table") {
     checkAnswer(sql("SELECT COUNT(*) FROM normal_orc_source"), Row(10))
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org

