author     Tao LI <tl@microsoft.com>          2016-10-02 16:01:02 -0700
committer  Reynold Xin <rxin@databricks.com>  2016-10-02 16:01:02 -0700
commit     76dc2d9073e5e5c45c8b806a474beacb8415d506 (patch)
tree       c39c30f53d98caad457defe67fdbb9177660eaba
parent     f8d7fade4b9a78ae87b6012e3d6f71eef3032b22 (diff)
[SPARK-14914][CORE][SQL] Skip/fix some test cases on Windows due to limitation of Windows
## What changes were proposed in this pull request?

This PR proposes to fix or skip some tests that fail on Windows. It takes over https://github.com/apache/spark/pull/12696.

**Before**

- **SparkSubmitSuite**

  ```
  [info] - launch simple application with spark-submit *** FAILED *** (202 milliseconds)
  [info]   java.io.IOException: Cannot run program "./bin/spark-submit" (in directory "C:\projects\spark"): CreateProcess error=2, The system cannot find the file specified
  [info] - includes jars passed in through --jars *** FAILED *** (1 second, 625 milliseconds)
  [info]   java.io.IOException: Cannot run program "./bin/spark-submit" (in directory "C:\projects\spark"): CreateProcess error=2, The system cannot find the file specified
  ```

- **DiskStoreSuite**

  ```
  [info] - reads of memory-mapped and non memory-mapped files are equivalent *** FAILED *** (1 second, 78 milliseconds)
  [info]   diskStoreMapped.remove(blockId) was false (DiskStoreSuite.scala:41)
  ```

**After**

- **SparkSubmitSuite**

  ```
  [info] - launch simple application with spark-submit (578 milliseconds)
  [info] - includes jars passed in through --jars (1 second, 875 milliseconds)
  ```

- **DiskStoreSuite**

  ```
  [info] DiskStoreSuite:
  [info] - reads of memory-mapped and non memory-mapped files are equivalent !!! CANCELED !!! (766 milliseconds)
  ```

For `CreateTableAsSelectSuite` and `FsHistoryProviderSuite`, I could not reproduce the failures, as the Java version in use appears newer than the ones affected by the `setReadable(...)` and `setWritable(...)` bugs (JDK-6728842); but since those bugs are clearly reported, it seems sensible to skip the tests. We should revert both changes as soon as we drop Java 7 support.

## How was this patch tested?

Manually tested via AppVeyor.

Closes #12696

Author: Tao LI <tl@microsoft.com>
Author: U-FAREAST\tl <tl@microsoft.com>
Author: hyukjinkwon <gurwls223@gmail.com>

Closes #15320 from HyukjinKwon/SPARK-14914.
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala                               | 12
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala                  |  7
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala    |  2
-rw-r--r--  core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala                   |  4
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala |  3
5 files changed, 16 insertions(+), 12 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index f3493bd96b..ef832756ce 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -23,7 +23,7 @@ import java.net._
import java.nio.ByteBuffer
import java.nio.channels.Channels
import java.nio.charset.StandardCharsets
-import java.nio.file.Files
+import java.nio.file.{Files, Paths}
import java.util.{Locale, Properties, Random, UUID}
import java.util.concurrent._
import java.util.concurrent.atomic.AtomicBoolean
@@ -1014,15 +1014,7 @@ private[spark] object Utils extends Logging {
* Check to see if file is a symbolic link.
*/
def isSymlink(file: File): Boolean = {
- if (file == null) throw new NullPointerException("File must not be null")
- if (isWindows) return false
- val fileInCanonicalDir = if (file.getParent() == null) {
- file
- } else {
- new File(file.getParentFile().getCanonicalFile(), file.getName())
- }
-
- !fileInCanonicalDir.getCanonicalFile().equals(fileInCanonicalDir.getAbsoluteFile())
+ return Files.isSymbolicLink(Paths.get(file.toURI))
}
/**
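For context, `java.nio.file.Files.isSymbolicLink` reads the link attribute of the file directly and behaves sensibly on Windows, which is what makes the one-line replacement safe. A minimal standalone sketch of the new behaviour (the demo object and file names are illustrative, not part of the patch):

```scala
import java.io.File
import java.nio.file.{Files, Paths}

object SymlinkCheck {
  // Same one-liner as the patched Utils.isSymlink: NIO inspects the file's
  // attributes instead of comparing canonical and absolute paths.
  def isSymlink(file: File): Boolean =
    Files.isSymbolicLink(Paths.get(file.toURI))

  def main(args: Array[String]): Unit = {
    val target = Files.createTempFile("target", ".txt")
    val link   = target.resolveSibling("link.txt")
    Files.createSymbolicLink(link, target) // may require privileges on Windows
    println(isSymlink(link.toFile))        // true
    println(isSymlink(target.toFile))      // false
    Files.delete(link)
    Files.delete(target)
  }
}
```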
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 31c8fb2646..732cbfaaee 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -649,8 +649,13 @@ class SparkSubmitSuite
// NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
private def runSparkSubmit(args: Seq[String]): Unit = {
val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
+ val sparkSubmitFile = if (Utils.isWindows) {
+ new File("..\\bin\\spark-submit.cmd")
+ } else {
+ new File("../bin/spark-submit")
+ }
val process = Utils.executeCommand(
- Seq("./bin/spark-submit") ++ args,
+ Seq(sparkSubmitFile.getCanonicalPath) ++ args,
new File(sparkHome),
Map("SPARK_TESTING" -> "1", "SPARK_HOME" -> sparkHome))
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
index 01bef0a11c..a5eda7b5a5 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
@@ -126,6 +126,8 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
}
test("SPARK-3697: ignore directories that cannot be read.") {
+ // setReadable(...) does not work on Windows. Please refer to JDK-6728842.
+ assume(!Utils.isWindows)
val logFile1 = newLogFile("new1", None, inProgress = false)
writeFile(logFile1, true, None,
SparkListenerApplicationStart("app1-1", Some("app1-1"), 1L, "test", None),
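`assume(...)` is ScalaTest's mechanism for conditional skipping: a false assumption throws `TestCanceledException`, so the test is reported as `!!! CANCELED !!!` rather than `FAILED`, matching the DiskStoreSuite output quoted in the commit message. A minimal illustration (the suite below is hypothetical):

```scala
import org.scalatest.FunSuite // AnyFunSuite in ScalaTest 3.1+

class WindowsOnlySkipSuite extends FunSuite {
  private val isWindows =
    sys.props.getOrElse("os.name", "").toLowerCase.startsWith("windows")

  test("posix-only behaviour") {
    // assume() cancels rather than fails when the condition is false,
    // which is exactly what the patched tests rely on.
    assume(!isWindows)
    assert(new java.io.File("/tmp").isDirectory)
  }
}
```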
diff --git a/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala b/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
index 9ed5016510..9e6b02b9ea 100644
--- a/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
@@ -22,10 +22,14 @@ import java.util.Arrays
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.io.ChunkedByteBuffer
+import org.apache.spark.util.Utils
class DiskStoreSuite extends SparkFunSuite {
test("reads of memory-mapped and non memory-mapped files are equivalent") {
+ // On Windows, re-opening the file store fails while a memory-mapped
+ // byte buffer to the file has not yet been garbage-collected.
+ assume(!Utils.isWindows)
val confKey = "spark.storage.memoryMapThreshold"
// Create a non-trivial (not all zeros) byte array
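The comment above refers to a long-standing JVM limitation: on Windows, a memory-mapped file cannot be deleted or re-opened for some operations until its `MappedByteBuffer` is garbage-collected, and the JDK exposes no supported eager unmap (see JDK-4724038). A small sketch that reproduces the symptom (file names are illustrative):

```scala
import java.io.RandomAccessFile
import java.nio.channels.FileChannel.MapMode
import java.nio.file.Files

object MmapDeleteDemo {
  def main(args: Array[String]): Unit = {
    val path = Files.createTempFile("mmap", ".bin")
    val raf  = new RandomAccessFile(path.toFile, "rw")
    raf.setLength(4096)
    val buf = raf.getChannel.map(MapMode.READ_WRITE, 0, 4096)
    raf.close()
    // On Windows this delete returns false while `buf` is still reachable:
    // the mapping holds the file open until the buffer is collected.
    println(s"deleted = ${path.toFile.delete()}")
    println(buf.isLoaded) // keep `buf` reachable past the delete attempt
  }
}
```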
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
index 344d4aa6cf..c39005f6a1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
@@ -26,7 +26,6 @@ import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.BucketSpec
import org.apache.spark.sql.catalyst.parser.ParseException
-import org.apache.spark.sql.execution.command.DDLUtils
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.util.Utils
@@ -83,6 +82,8 @@ class CreateTableAsSelectSuite
}
test("CREATE TABLE USING AS SELECT based on the file without write permission") {
+ // setWritable(...) does not work on Windows. Please refer to JDK-6728842.
+ assume(!Utils.isWindows)
val childPath = new File(path.toString, "child")
path.mkdir()
path.setWritable(false)
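JDK-6728842 is the bug both skipped tests cite: `File.setReadable(false)`/`setWritable(false)` are ineffective on Windows (for directories in particular), so the permission-denied path these tests exercise never triggers there. A small probe sketch (standalone, not part of the patch):

```scala
import java.nio.file.Files

object SetWritableProbe {
  def main(args: Array[String]): Unit = {
    val dir = Files.createTempDirectory("perm").toFile
    // On POSIX this revokes write access; on Windows it is ineffective
    // for directories (JDK-6728842), so a test that depends on the
    // resulting "permission denied" never sees that failure mode.
    val changed = dir.setWritable(false)
    println(s"setWritable(false) returned $changed; canWrite = ${dir.canWrite}")
    dir.setWritable(true) // restore so the temp dir can be cleaned up
    dir.delete()
  }
}
```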