From 1c403733b89258e57daf7b8b0a2011981ad7ed8a Mon Sep 17 00:00:00 2001 From: hyukjinkwon Date: Thu, 26 May 2016 20:55:06 -0700 Subject: [SPARK-8603][SPARKR] Use shell() instead of system2() for SparkR on Windows ## What changes were proposed in this pull request? This PR corrects SparkR to use `shell()` instead of `system2()` on Windows. Using `system2(...)` on Windows does not process the Windows file separator `\`. `shell(translate = TRUE, ...)` can handle this problem. So, the command is now chosen according to the OS. Existing tests failed on Windows due to this problem. For example, the following failed: ``` 8. Failure: sparkJars tag in SparkContext (test_includeJAR.R#34) 9. Failure: sparkJars tag in SparkContext (test_includeJAR.R#36) ``` The cases above failed due to the use of `system2`. In addition, this PR also fixes some other tests that failed on Windows. ``` 5. Failure: sparkJars sparkPackages as comma-separated strings (test_context.R#128) 6. Failure: sparkJars sparkPackages as comma-separated strings (test_context.R#131) 7. Failure: sparkJars sparkPackages as comma-separated strings (test_context.R#134) ``` The cases above failed due to a platform-dependent behaviour of `normalizePath()`. On Linux, if the path does not exist, it just returns the input, but on Windows it returns the input prefixed with the current directory. ```r # On Linux path <- normalizePath("aa") print(path) [1] "aa" # On Windows path <- normalizePath("aa") print(path) [1] "C:\\Users\\aa" ``` ## How was this patch tested? Jenkins tests, and manually tested on a Windows machine as below: Here is the [stdout](https://gist.github.com/HyukjinKwon/4bf35184f3a30f3bce987a58ec2bbbab) of the testing. Closes #7025 Author: hyukjinkwon Author: Hyukjin Kwon Author: Prakash PC Closes #13165 from HyukjinKwon/pr/7025. 
--- R/pkg/inst/tests/testthat/test_Windows.R | 26 ++++++++++++++++++++++++++ R/pkg/inst/tests/testthat/test_context.R | 6 +++--- R/pkg/inst/tests/testthat/test_includeJAR.R | 7 +++---- 3 files changed, 32 insertions(+), 7 deletions(-) create mode 100644 R/pkg/inst/tests/testthat/test_Windows.R (limited to 'R/pkg/inst') diff --git a/R/pkg/inst/tests/testthat/test_Windows.R b/R/pkg/inst/tests/testthat/test_Windows.R new file mode 100644 index 0000000000..8813e18a1f --- /dev/null +++ b/R/pkg/inst/tests/testthat/test_Windows.R @@ -0,0 +1,26 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +context("Windows-specific tests") + +test_that("sparkJars tag in SparkContext", { + if (.Platform$OS.type != "windows") { + skip("This test is only for Windows, skipped") + } + testOutput <- launchScript("ECHO", "a/b/c", capture = TRUE) + abcPath <- testOutput[1] + expect_equal(abcPath, "a\\b\\c") +}) diff --git a/R/pkg/inst/tests/testthat/test_context.R b/R/pkg/inst/tests/testthat/test_context.R index c1f1a8932d..15915e2d13 100644 --- a/R/pkg/inst/tests/testthat/test_context.R +++ b/R/pkg/inst/tests/testthat/test_context.R @@ -129,13 +129,13 @@ test_that("getClientModeSparkSubmitOpts() returns spark-submit args from whiteli test_that("sparkJars sparkPackages as comma-separated strings", { expect_warning(processSparkJars(" a, b ")) jars <- suppressWarnings(processSparkJars(" a, b ")) - expect_equal(jars, c("a", "b")) + expect_equal(lapply(jars, basename), list("a", "b")) jars <- suppressWarnings(processSparkJars(" abc ,, def ")) - expect_equal(jars, c("abc", "def")) + expect_equal(lapply(jars, basename), list("abc", "def")) jars <- suppressWarnings(processSparkJars(c(" abc ,, def ", "", "xyz", " ", "a,b"))) - expect_equal(jars, c("abc", "def", "xyz", "a", "b")) + expect_equal(lapply(jars, basename), list("abc", "def", "xyz", "a", "b")) p <- processSparkPackages(c("ghi", "lmn")) expect_equal(p, c("ghi", "lmn")) diff --git a/R/pkg/inst/tests/testthat/test_includeJAR.R b/R/pkg/inst/tests/testthat/test_includeJAR.R index f89aa8e507..512dd39cb2 100644 --- a/R/pkg/inst/tests/testthat/test_includeJAR.R +++ b/R/pkg/inst/tests/testthat/test_includeJAR.R @@ -21,10 +21,9 @@ runScript <- function() { sparkTestJarPath <- "R/lib/SparkR/test_support/sparktestjar_2.10-1.0.jar" jarPath <- paste("--jars", shQuote(file.path(sparkHome, sparkTestJarPath))) scriptPath <- file.path(sparkHome, "R/lib/SparkR/tests/testthat/jarTest.R") - submitPath <- file.path(sparkHome, "bin/spark-submit") - res <- system2(command = submitPath, - args = c(jarPath, scriptPath), - stdout = TRUE) + 
submitPath <- file.path(sparkHome, paste("bin/", determineSparkSubmitBin(), sep = "")) + combinedArgs <- paste(jarPath, scriptPath, sep = " ") + res <- launchScript(submitPath, combinedArgs, capture = TRUE) tail(res, 2) } -- cgit v1.2.3