-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala                                12
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala                    7
-rw-r--r--  core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala      2
-rw-r--r--  core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala                     4
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala   3
5 files changed, 16 insertions(+), 12 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index f3493bd96b..ef832756ce 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -23,7 +23,7 @@ import java.net._
 import java.nio.ByteBuffer
 import java.nio.channels.Channels
 import java.nio.charset.StandardCharsets
-import java.nio.file.Files
+import java.nio.file.{Files, Paths}
 import java.util.{Locale, Properties, Random, UUID}
 import java.util.concurrent._
 import java.util.concurrent.atomic.AtomicBoolean
@@ -1014,15 +1014,7 @@ private[spark] object Utils extends Logging {
    * Check to see if file is a symbolic link.
    */
   def isSymlink(file: File): Boolean = {
-    if (file == null) throw new NullPointerException("File must not be null")
-    if (isWindows) return false
-    val fileInCanonicalDir = if (file.getParent() == null) {
-      file
-    } else {
-      new File(file.getParentFile().getCanonicalFile(), file.getName())
-    }
-
-    !fileInCanonicalDir.getCanonicalFile().equals(fileInCanonicalDir.getAbsoluteFile())
+    return Files.isSymbolicLink(Paths.get(file.toURI))
   }

   /**
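
The rewritten isSymlink delegates the whole check to java.nio: Files.isSymbolicLink reads the link status of the path itself, so the old canonical-vs-absolute path comparison (and the Windows early return) is no longer needed. A minimal standalone sketch of the same call, with illustrative temp-file names that are not part of the patch:

import java.nio.file.Files

object SymlinkProbe {
  def main(args: Array[String]): Unit = {
    // Create a target file and a sibling symlink pointing at it. Note that
    // creating symlinks on Windows may require elevated privileges.
    val target = Files.createTempFile("symlink-target", ".tmp")
    val link = target.resolveSibling("symlink-link")
    Files.deleteIfExists(link)
    Files.createSymbolicLink(link, target)

    // isSymbolicLink inspects the path itself and does not follow it.
    println(Files.isSymbolicLink(link))   // true
    println(Files.isSymbolicLink(target)) // false

    Files.delete(link)
    Files.delete(target)
  }
}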
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 31c8fb2646..732cbfaaee 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -649,8 +649,13 @@ class SparkSubmitSuite
   // NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
   private def runSparkSubmit(args: Seq[String]): Unit = {
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
+    val sparkSubmitFile = if (Utils.isWindows) {
+      new File("..\\bin\\spark-submit.cmd")
+    } else {
+      new File("../bin/spark-submit")
+    }
     val process = Utils.executeCommand(
-      Seq("./bin/spark-submit") ++ args,
+      Seq(sparkSubmitFile.getCanonicalPath) ++ args,
       new File(sparkHome),
       Map("SPARK_TESTING" -> "1", "SPARK_HOME" -> sparkHome))
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
index 01bef0a11c..a5eda7b5a5 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
@@ -126,6 +126,8 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
   }

   test("SPARK-3697: ignore directories that cannot be read.") {
+    // setReadable(...) does not work on Windows. Please refer to JDK-6728842.
+    assume(!Utils.isWindows)
     val logFile1 = newLogFile("new1", None, inProgress = false)
     writeFile(logFile1, true, None,
       SparkListenerApplicationStart("app1-1", Some("app1-1"), 1L, "test", None),
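
The new assume guard exists because of JDK-6728842: java.io.File.setReadable(false) cannot revoke read access on Windows, so the "unreadable directory" fixture never becomes unreadable and the test would fail for the wrong reason. A small probe illustrating the behaviour the bug describes:

import java.io.File
import java.nio.file.Files

object SetReadableProbe {
  def main(args: Array[String]): Unit = {
    val f: File = Files.createTempFile("perm-probe", ".tmp").toFile
    // On Windows, setReadable(false) returns false and the file stays
    // readable (JDK-6728842); on POSIX systems it typically returns true
    // and canRead flips to false (unless running as root).
    val revoked = f.setReadable(false)
    println(s"setReadable(false) returned $revoked, canRead = ${f.canRead}")
    f.setReadable(true)
    f.delete()
  }
}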
diff --git a/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala b/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
index 9ed5016510..9e6b02b9ea 100644
--- a/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
@@ -22,10 +22,14 @@ import java.util.Arrays

 import org.apache.spark.{SparkConf, SparkFunSuite}
 import org.apache.spark.util.io.ChunkedByteBuffer
+import org.apache.spark.util.Utils

 class DiskStoreSuite extends SparkFunSuite {

   test("reads of memory-mapped and non memory-mapped files are equivalent") {
+    // Re-opening the file store causes an error on Windows while the
+    // memory-mapped byte buffer to the file has not yet been GC'd.
+    assume(!Utils.isWindows)
     val confKey = "spark.storage.memoryMapThreshold"

     // Create a non-trivial (not all zeros) byte array
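
The DiskStoreSuite guard covers a different Windows quirk: a MappedByteBuffer keeps the underlying file locked until it is unmapped, and the JDK unmaps only when the buffer is garbage-collected, so re-opening or deleting the file while a mapping is live fails on Windows but succeeds on POSIX systems. A sketch that makes the lock observable:

import java.io.RandomAccessFile
import java.nio.channels.FileChannel
import java.nio.file.Files

object MmapLockProbe {
  def main(args: Array[String]): Unit = {
    val path = Files.createTempFile("mmap-probe", ".bin")
    Files.write(path, Array.fill[Byte](4096)(1))

    // Map the file and close the channel; the mapping stays valid.
    val raf = new RandomAccessFile(path.toFile, "r")
    val buf = raf.getChannel.map(FileChannel.MapMode.READ_ONLY, 0, 4096)
    raf.close()

    // On Windows this delete fails while the mapping is reachable; on
    // Linux/macOS it succeeds even though the buffer is still usable.
    println(s"delete returned ${path.toFile.delete()}")
    println(buf.get(0)) // keep the mapping alive past the delete attempt
  }
}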
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
index 344d4aa6cf..c39005f6a1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
@@ -26,7 +26,6 @@ import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog.BucketSpec
 import org.apache.spark.sql.catalyst.parser.ParseException
-import org.apache.spark.sql.execution.command.DDLUtils
 import org.apache.spark.sql.test.SharedSQLContext
 import org.apache.spark.util.Utils

@@ -83,6 +82,8 @@ class CreateTableAsSelectSuite
   }

   test("CREATE TABLE USING AS SELECT based on the file without write permission") {
+    // setWritable(...) does not work on Windows. Please refer to JDK-6728842.
+    assume(!Utils.isWindows)
     val childPath = new File(path.toString, "child")
     path.mkdir()
     path.setWritable(false)
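
The same JDK-6728842 limitation applies to setWritable: marking a directory read-only on Windows does not block file creation inside it, so this test could never provoke the expected write failure there. A companion probe under the same assumptions as the setReadable sketch above:

import java.io.File
import java.nio.file.Files

object SetWritableProbe {
  def main(args: Array[String]): Unit = {
    val dir: File = Files.createTempDirectory("perm-probe").toFile
    // On Windows the read-only attribute on a directory does not prevent
    // creating files in it (JDK-6728842); on POSIX the chmod takes effect
    // and the createNewFile call below throws instead.
    val revoked = dir.setWritable(false)
    val child = new File(dir, "child")
    val created = try child.createNewFile() catch { case _: java.io.IOException => false }
    println(s"setWritable(false) returned $revoked, child created = $created")
    dir.setWritable(true)
    child.delete()
    dir.delete()
  }
}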