From 7e15044d9d9f9839c8d422bae71f27e855d559b4 Mon Sep 17 00:00:00 2001
From: Yucai Yu <yucai.yu@intel.com>
Date: Tue, 12 Jan 2016 13:23:23 +0000
Subject: [PATCH] [SPARK-12582][TEST] IndexShuffleBlockResolverSuite fails on
 Windows

[SPARK-12582][Test] IndexShuffleBlockResolverSuite fails on Windows

* IndexShuffleBlockResolverSuite fails on Windows because the test's streams are not closed, leaving the files locked.
* Move IndexShuffleBlockResolverSuite.scala from "test/java" to "test/scala".

https://issues.apache.org/jira/browse/SPARK-12582

Author: Yucai Yu <yucai.yu@intel.com>

Closes #10526 from yucai/master.
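
The failure mode is that an open FileOutputStream/FileInputStream handle keeps
the temp file locked on Windows, so writeIndexFileAndCommit cannot rename or
delete it. The patch therefore wraps each read/write in Utils.tryWithSafeFinally
so the stream is closed even when the body throws. Below is a minimal standalone
sketch of the pattern; the local tryWithSafeFinally is a simplified stand-in for
Spark's helper, which additionally prevents an exception thrown by the finally
block from masking one thrown by the body.

    import java.io.{File, FileOutputStream}

    object SafeCloseSketch {
      // Simplified stand-in for org.apache.spark.util.Utils.tryWithSafeFinally:
      // run the body, then always run the cleanup block.
      def tryWithSafeFinally[T](block: => T)(finallyBlock: => Unit): T = {
        try {
          block
        } finally {
          finallyBlock
        }
      }

      def main(args: Array[String]): Unit = {
        val tmp = File.createTempFile("shuffle", null)
        val out = new FileOutputStream(tmp)
        tryWithSafeFinally {
          // the kind of write the test performs
          out.write(new Array[Byte](30))
        } {
          // without close(), Windows keeps the file locked and a later
          // delete() or rename fails, which is what broke the suite
          out.close()
        }
        // With the stream closed, the temp file can be removed on any platform.
        assert(tmp.delete())
      }
    }
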
---
 .../sort/IndexShuffleBlockResolverSuite.scala | 51 ++++++++++++-------
 1 file changed, 34 insertions(+), 17 deletions(-)
 rename core/src/test/{java => scala}/org/apache/spark/shuffle/sort/IndexShuffleBlockResolverSuite.scala (87%)

diff --git a/core/src/test/java/org/apache/spark/shuffle/sort/IndexShuffleBlockResolverSuite.scala b/core/src/test/scala/org/apache/spark/shuffle/sort/IndexShuffleBlockResolverSuite.scala
similarity index 87%
rename from core/src/test/java/org/apache/spark/shuffle/sort/IndexShuffleBlockResolverSuite.scala
rename to core/src/test/scala/org/apache/spark/shuffle/sort/IndexShuffleBlockResolverSuite.scala
index f200ff36c7..d21ce73f40 100644
--- a/core/src/test/java/org/apache/spark/shuffle/sort/IndexShuffleBlockResolverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/shuffle/sort/IndexShuffleBlockResolverSuite.scala
@@ -19,18 +19,18 @@ package org.apache.spark.shuffle.sort
 
 import java.io.{File, FileInputStream, FileOutputStream}
 
+import org.mockito.{Mock, MockitoAnnotations}
 import org.mockito.Answers.RETURNS_SMART_NULLS
 import org.mockito.Matchers._
 import org.mockito.Mockito._
 import org.mockito.invocation.InvocationOnMock
 import org.mockito.stubbing.Answer
-import org.mockito.{Mock, MockitoAnnotations}
 import org.scalatest.BeforeAndAfterEach
 
+import org.apache.spark.{SparkConf, SparkFunSuite}
 import org.apache.spark.shuffle.IndexShuffleBlockResolver
 import org.apache.spark.storage._
 import org.apache.spark.util.Utils
-import org.apache.spark.{SparkConf, SparkFunSuite}
 
 
 class IndexShuffleBlockResolverSuite extends SparkFunSuite with BeforeAndAfterEach {
@@ -64,12 +64,15 @@ class IndexShuffleBlockResolverSuite extends SparkFunSuite with BeforeAndAfterEa
   }
 
   test("commit shuffle files multiple times") {
-    val lengths = Array[Long](10, 0, 20)
     val resolver = new IndexShuffleBlockResolver(conf, blockManager)
+    val lengths = Array[Long](10, 0, 20)
     val dataTmp = File.createTempFile("shuffle", null, tempDir)
     val out = new FileOutputStream(dataTmp)
-    out.write(new Array[Byte](30))
-    out.close()
+    Utils.tryWithSafeFinally {
+      out.write(new Array[Byte](30))
+    } {
+      out.close()
+    }
     resolver.writeIndexFileAndCommit(1, 2, lengths, dataTmp)
 
     val dataFile = resolver.getDataFile(1, 2)
@@ -77,12 +80,15 @@ class IndexShuffleBlockResolverSuite extends SparkFunSuite with BeforeAndAfterEa
     assert(dataFile.length() === 30)
     assert(!dataTmp.exists())
 
+    val lengths2 = new Array[Long](3)
     val dataTmp2 = File.createTempFile("shuffle", null, tempDir)
     val out2 = new FileOutputStream(dataTmp2)
-    val lengths2 = new Array[Long](3)
-    out2.write(Array[Byte](1))
-    out2.write(new Array[Byte](29))
-    out2.close()
+    Utils.tryWithSafeFinally {
+      out2.write(Array[Byte](1))
+      out2.write(new Array[Byte](29))
+    } {
+      out2.close()
+    }
     resolver.writeIndexFileAndCommit(1, 2, lengths2, dataTmp2)
     assert(lengths2.toSeq === lengths.toSeq)
     assert(dataFile.exists())
@@ -90,20 +96,27 @@ class IndexShuffleBlockResolverSuite extends SparkFunSuite with BeforeAndAfterEa
     assert(!dataTmp2.exists())
 
     // The dataFile should be the previous one
-    val in = new FileInputStream(dataFile)
     val firstByte = new Array[Byte](1)
-    in.read(firstByte)
+    val in = new FileInputStream(dataFile)
+    Utils.tryWithSafeFinally {
+      in.read(firstByte)
+    } {
+      in.close()
+    }
     assert(firstByte(0) === 0)
 
     // remove data file
     dataFile.delete()
 
+    val lengths3 = Array[Long](10, 10, 15)
     val dataTmp3 = File.createTempFile("shuffle", null, tempDir)
     val out3 = new FileOutputStream(dataTmp3)
-    val lengths3 = Array[Long](10, 10, 15)
-    out3.write(Array[Byte](2))
-    out3.write(new Array[Byte](34))
-    out3.close()
+    Utils.tryWithSafeFinally {
+      out3.write(Array[Byte](2))
+      out3.write(new Array[Byte](34))
+    } {
+      out3.close()
+    }
     resolver.writeIndexFileAndCommit(1, 2, lengths3, dataTmp3)
     assert(lengths3.toSeq != lengths.toSeq)
     assert(dataFile.exists())
@@ -111,9 +124,13 @@ class IndexShuffleBlockResolverSuite extends SparkFunSuite with BeforeAndAfterEa
     assert(!dataTmp2.exists())
 
     // The dataFile should be the previous one
-    val in2 = new FileInputStream(dataFile)
     val firstByte2 = new Array[Byte](1)
-    in2.read(firstByte2)
+    val in2 = new FileInputStream(dataFile)
+    Utils.tryWithSafeFinally {
+      in2.read(firstByte2)
+    } {
+      in2.close()
+    }
     assert(firstByte2(0) === 2)
   }
 }
-- 
GitLab