author    Yu ISHIKAWA <yuu.ishikawa@gmail.com>       2015-07-17 17:00:50 +0900
committer Kousuke Saruta <sarutak@oss.nttdata.co.jp> 2015-07-17 17:00:50 +0900
commit    5a3c1ad087cb645a9496349ca021168e479ffae9 (patch)
tree      c43d2e88aa8d1a4db98f682f74a35876b1f991d6 /R/pkg
parent    3f6d28a5ca98cf7d20c2c029094350cc4f9545a0 (diff)
[SPARK-9093] [SPARKR] Fix single-quotes strings in SparkR
[[SPARK-9093] Fix single-quotes strings in SparkR - ASF JIRA](https://issues.apache.org/jira/browse/SPARK-9093)

This is the result of lintr at the revision 011551620faa87107a787530f074af3d9be7e695:
[[SPARK-9093] The result of lintr at 011551620faa87107a787530f074af3d9be7e695](https://gist.github.com/yu-iskw/8c47acf3202796da4d01)

Author: Yu ISHIKAWA <yuu.ishikawa@gmail.com>

Closes #7439 from yu-iskw/SPARK-9093 and squashes the following commits:

61c391e [Yu ISHIKAWA] [SPARK-9093][SparkR] Fix single-quotes strings in SparkR
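For context, here is how the lint check behind this change can be reproduced locally. A minimal sketch, assuming the lintr package is installed; the file path is illustrative:

# Lint one SparkR source file; lintr prints style warnings, including
# quote-style issues like the single-quoted strings replaced in this commit.
library(lintr)
lint("R/pkg/R/DataFrame.R")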
Diffstat (limited to 'R/pkg')
 R/pkg/R/DataFrame.R              | 10 +++++-----
 R/pkg/R/SQLContext.R             |  4 ++--
 R/pkg/R/serialize.R              |  4 ++--
 R/pkg/R/sparkR.R                 |  2 +-
 R/pkg/inst/tests/test_sparkSQL.R |  4 ++--
 5 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/R/pkg/R/DataFrame.R b/R/pkg/R/DataFrame.R
index 208813768e..a58433df3c 100644
--- a/R/pkg/R/DataFrame.R
+++ b/R/pkg/R/DataFrame.R
@@ -1314,7 +1314,7 @@ setMethod("except",
#' write.df(df, "myfile", "parquet", "overwrite")
#' }
setMethod("write.df",
- signature(df = "DataFrame", path = 'character'),
+ signature(df = "DataFrame", path = "character"),
function(df, path, source = NULL, mode = "append", ...){
if (is.null(source)) {
sqlContext <- get(".sparkRSQLsc", envir = .sparkREnv)
@@ -1328,7 +1328,7 @@ setMethod("write.df",
jmode <- callJStatic("org.apache.spark.sql.api.r.SQLUtils", "saveMode", mode)
options <- varargsToEnv(...)
if (!is.null(path)) {
- options[['path']] <- path
+ options[["path"]] <- path
}
callJMethod(df@sdf, "save", source, jmode, options)
})
@@ -1337,7 +1337,7 @@ setMethod("write.df",
#' @aliases saveDF
#' @export
setMethod("saveDF",
- signature(df = "DataFrame", path = 'character'),
+ signature(df = "DataFrame", path = "character"),
function(df, path, source = NULL, mode = "append", ...){
write.df(df, path, source, mode, ...)
})
@@ -1375,8 +1375,8 @@ setMethod("saveDF",
#' saveAsTable(df, "myfile")
#' }
setMethod("saveAsTable",
- signature(df = "DataFrame", tableName = 'character', source = 'character',
- mode = 'character'),
+ signature(df = "DataFrame", tableName = "character", source = "character",
+ mode = "character"),
function(df, tableName, source = NULL, mode="append", ...){
if (is.null(source)) {
sqlContext <- get(".sparkRSQLsc", envir = .sparkREnv)
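The calls themselves are unaffected by the quote fix. A minimal usage sketch, mirroring the roxygen examples already shown in this file (assuming an initialized SparkR context and an existing DataFrame df):

# Write `df` out through the generic data-source API, then register it as a
# table; both methods now declare their signatures with double-quoted types.
write.df(df, "myfile", "parquet", "overwrite")
saveAsTable(df, "myfile")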
diff --git a/R/pkg/R/SQLContext.R b/R/pkg/R/SQLContext.R
index 30978bb50d..110117a18c 100644
--- a/R/pkg/R/SQLContext.R
+++ b/R/pkg/R/SQLContext.R
@@ -457,7 +457,7 @@ dropTempTable <- function(sqlContext, tableName) {
read.df <- function(sqlContext, path = NULL, source = NULL, schema = NULL, ...) {
options <- varargsToEnv(...)
if (!is.null(path)) {
- options[['path']] <- path
+ options[["path"]] <- path
}
if (is.null(source)) {
sqlContext <- get(".sparkRSQLsc", envir = .sparkREnv)
@@ -506,7 +506,7 @@ loadDF <- function(sqlContext, path = NULL, source = NULL, schema = NULL, ...) {
createExternalTable <- function(sqlContext, tableName, path = NULL, source = NULL, ...) {
options <- varargsToEnv(...)
if (!is.null(path)) {
- options[['path']] <- path
+ options[["path"]] <- path
}
sdf <- callJMethod(sqlContext, "createExternalTable", tableName, source, options)
dataFrame(sdf)
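On the read side, the same options environment is populated. A minimal usage sketch, assuming a sqlContext from sparkRSQL.init(); the parquet path is hypothetical:

# The path argument is stored in options[["path"]], as shown above, before
# being handed to the JVM-side data source.
df <- read.df(sqlContext, "people.parquet", source = "parquet")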
diff --git a/R/pkg/R/serialize.R b/R/pkg/R/serialize.R
index 78535eff0d..311021e5d8 100644
--- a/R/pkg/R/serialize.R
+++ b/R/pkg/R/serialize.R
@@ -140,8 +140,8 @@ writeType <- function(con, class) {
jobj = "j",
environment = "e",
Date = "D",
- POSIXlt = 't',
- POSIXct = 't',
+ POSIXlt = "t",
+ POSIXct = "t",
stop(paste("Unsupported type for serialization", class)))
writeBin(charToRaw(type), con)
}
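An illustration of the mapping above, as a standalone base-R snippet: both POSIX time classes serialize under the same one-byte type tag "t".

type <- switch("POSIXct",
               POSIXlt = "t",
               POSIXct = "t",
               stop("Unsupported type for serialization"))
charToRaw(type)  # 74 -- the raw byte that writeBin() sends for "t"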
diff --git a/R/pkg/R/sparkR.R b/R/pkg/R/sparkR.R
index 172335809d..79b79d7094 100644
--- a/R/pkg/R/sparkR.R
+++ b/R/pkg/R/sparkR.R
@@ -140,7 +140,7 @@ sparkR.init <- function(
if (!file.exists(path)) {
stop("JVM is not ready after 10 seconds")
}
- f <- file(path, open='rb')
+ f <- file(path, open="rb")
backendPort <- readInt(f)
monitorPort <- readInt(f)
close(f)
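For reference, a sketch of what the port handshake reads from that connection. This assumes SparkR's internal readInt() is a big-endian readBin() wrapper; "ports.bin" is a hypothetical stand-in for the backend's port file:

f <- file("ports.bin", open = "rb")
backendPort <- readBin(f, integer(), n = 1, endian = "big")  # first int: backend port
monitorPort <- readBin(f, integer(), n = 1, endian = "big")  # second int: monitor port
close(f)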
diff --git a/R/pkg/inst/tests/test_sparkSQL.R b/R/pkg/inst/tests/test_sparkSQL.R
index cdfe6481f6..a3039d36c9 100644
--- a/R/pkg/inst/tests/test_sparkSQL.R
+++ b/R/pkg/inst/tests/test_sparkSQL.R
@@ -57,9 +57,9 @@ test_that("infer types", {
expect_equal(infer_type(as.Date("2015-03-11")), "date")
expect_equal(infer_type(as.POSIXlt("2015-03-11 12:13:04.043")), "timestamp")
expect_equal(infer_type(c(1L, 2L)),
- list(type = 'array', elementType = "integer", containsNull = TRUE))
+ list(type = "array", elementType = "integer", containsNull = TRUE))
expect_equal(infer_type(list(1L, 2L)),
- list(type = 'array', elementType = "integer", containsNull = TRUE))
+ list(type = "array", elementType = "integer", containsNull = TRUE))
testStruct <- infer_type(list(a = 1L, b = "2"))
expect_equal(class(testStruct), "structType")
checkStructField(testStruct$fields()[[1]], "a", "IntegerType", TRUE)
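The pattern in the test above is runnable interactively. A minimal sketch, assuming SparkR is loaded: infer_type() maps an R vector to a Spark SQL array-type descriptor, now expressed with double-quoted strings.

infer_type(c(1L, 2L))
# list(type = "array", elementType = "integer", containsNull = TRUE)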