diff options
author | felixcheung <felixcheung_m@hotmail.com> | 2016-01-17 09:29:08 -0800 |
---|---|---|
committer | Shivaram Venkataraman <shivaram@cs.berkeley.edu> | 2016-01-17 09:29:08 -0800 |
commit | 92502703f4a29c706539f5ba47fd58b6fc41c14d (patch) | |
tree | 19b21f4b91c6e06a39058a81e5cf3e8c468ae0cb /R | |
parent | cede7b2a1134a6c93aff20ed5625054d988d3659 (diff) | |
download | spark-92502703f4a29c706539f5ba47fd58b6fc41c14d.tar.gz spark-92502703f4a29c706539f5ba47fd58b6fc41c14d.tar.bz2 spark-92502703f4a29c706539f5ba47fd58b6fc41c14d.zip |
[SPARK-12862][SPARKR] Jenkins does not run R tests
Slight correction: I'm leaving sparkR as-is (i.e., R file not supported) and fixed only run-tests.sh as shivaram described.
I also assume we are going to cover all doc changes in https://issues.apache.org/jira/browse/SPARK-12846 instead of here.
rxin shivaram zjffdu
Author: felixcheung <felixcheung_m@hotmail.com>
Closes #10792 from felixcheung/sparkRcmd.
Diffstat (limited to 'R')
-rw-r--r-- | R/pkg/inst/tests/testthat/test_sparkSQL.R | 2 | ||||
-rwxr-xr-x | R/run-tests.sh | 2 |
2 files changed, 2 insertions, 2 deletions
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R index 27ad9f3958..67ecdbc522 100644 --- a/R/pkg/inst/tests/testthat/test_sparkSQL.R +++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R @@ -1781,7 +1781,7 @@ test_that("Method coltypes() to get and set R's data types of a DataFrame", { expect_equal(coltypes(x), "map<string,string>") df <- selectExpr(read.json(sqlContext, jsonPath), "name", "(age * 1.21) as age") - expect_equal(dtypes(df), list(c("name", "string"), c("age", "decimal(24,2)"))) + expect_equal(dtypes(df), list(c("name", "string"), c("age", "double"))) df1 <- select(df, cast(df$age, "integer")) coltypes(df) <- c("character", "integer") diff --git a/R/run-tests.sh b/R/run-tests.sh index e64a4ea94c..9dcf0ace7d 100755 --- a/R/run-tests.sh +++ b/R/run-tests.sh @@ -23,7 +23,7 @@ FAILED=0 LOGFILE=$FWDIR/unit-tests.out rm -f $LOGFILE -SPARK_TESTING=1 $FWDIR/../bin/sparkR --driver-java-options "-Dlog4j.configuration=file:$FWDIR/log4j.properties" --conf spark.hadoop.fs.default.name="file:///" $FWDIR/pkg/tests/run-all.R 2>&1 | tee -a $LOGFILE +SPARK_TESTING=1 $FWDIR/../bin/spark-submit --driver-java-options "-Dlog4j.configuration=file:$FWDIR/log4j.properties" --conf spark.hadoop.fs.default.name="file:///" $FWDIR/pkg/tests/run-all.R 2>&1 | tee -a $LOGFILE FAILED=$((PIPESTATUS[0]||$FAILED)) if [[ $FAILED != 0 ]]; then |