about summary refs log tree commit diff
path: root/R
diff options
context:
space:
mode:
author	hyukjinkwon <gurwls223@gmail.com>	2017-03-14 19:51:25 -0700
committer	Felix Cheung <felixcheung@apache.org>	2017-03-14 19:51:25 -0700
commit	d1f6c64c4b763c05d6d79ae5497f298dc3835f3e (patch)
tree	3212b7cef66bde23763bba7358f7560a973d8beb /R
parent	8fb2a02e2ce6832e3d9338a7d0148dfac9fa24c2 (diff)
download	spark-d1f6c64c4b763c05d6d79ae5497f298dc3835f3e.tar.gz
	spark-d1f6c64c4b763c05d6d79ae5497f298dc3835f3e.tar.bz2
	spark-d1f6c64c4b763c05d6d79ae5497f298dc3835f3e.zip
[SPARK-19828][R] Support array type in from_json in R
## What changes were proposed in this pull request?

Since we could not directly define the array type in R, this PR proposes to support array types in R as string types that are used in `structField` as below:

```R
jsonArr <- "[{\"name\":\"Bob\"}, {\"name\":\"Alice\"}]"
df <- as.DataFrame(list(list("people" = jsonArr)))
collect(select(df, alias(from_json(df$people, "array<struct<name:string>>"), "arrcol")))
```

prints

```R
      arrcol
1 Bob, Alice
```

## How was this patch tested?

Unit tests in `test_sparkSQL.R`.

Author: hyukjinkwon <gurwls223@gmail.com>

Closes #17178 from HyukjinKwon/SPARK-19828.
Diffstat (limited to 'R')
-rw-r--r--R/pkg/R/functions.R12
-rw-r--r--R/pkg/inst/tests/testthat/test_sparkSQL.R12
2 files changed, 22 insertions, 2 deletions
diff --git a/R/pkg/R/functions.R b/R/pkg/R/functions.R
index edf2bcf8fd..9867f2d5b7 100644
--- a/R/pkg/R/functions.R
+++ b/R/pkg/R/functions.R
@@ -2437,6 +2437,7 @@ setMethod("date_format", signature(y = "Column", x = "character"),
#'
#' @param x Column containing the JSON string.
#' @param schema a structType object to use as the schema to use when parsing the JSON string.
+#' @param asJsonArray indicating if input string is JSON array of objects or a single object.
#' @param ... additional named properties to control how the json is parsed, accepts the same
#' options as the JSON data source.
#'
@@ -2452,11 +2453,18 @@ setMethod("date_format", signature(y = "Column", x = "character"),
#'}
#' @note from_json since 2.2.0
setMethod("from_json", signature(x = "Column", schema = "structType"),
- function(x, schema, ...) {
+ function(x, schema, asJsonArray = FALSE, ...) {
+ if (asJsonArray) {
+ jschema <- callJStatic("org.apache.spark.sql.types.DataTypes",
+ "createArrayType",
+ schema$jobj)
+ } else {
+ jschema <- schema$jobj
+ }
options <- varargsToStrEnv(...)
jc <- callJStatic("org.apache.spark.sql.functions",
"from_json",
- x@jc, schema$jobj, options)
+ x@jc, jschema, options)
column(jc)
})
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R
index 9735fe3201..f7081cb1d4 100644
--- a/R/pkg/inst/tests/testthat/test_sparkSQL.R
+++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R
@@ -1364,6 +1364,18 @@ test_that("column functions", {
# check for unparseable
df <- as.DataFrame(list(list("a" = "")))
expect_equal(collect(select(df, from_json(df$a, schema)))[[1]][[1]], NA)
+
+ # check if array type in string is correctly supported.
+ jsonArr <- "[{\"name\":\"Bob\"}, {\"name\":\"Alice\"}]"
+ df <- as.DataFrame(list(list("people" = jsonArr)))
+ schema <- structType(structField("name", "string"))
+ arr <- collect(select(df, alias(from_json(df$people, schema, asJsonArray = TRUE), "arrcol")))
+ expect_equal(ncol(arr), 1)
+ expect_equal(nrow(arr), 1)
+ expect_is(arr[[1]][[1]], "list")
+ expect_equal(length(arr$arrcol[[1]]), 2)
+ expect_equal(arr$arrcol[[1]][[1]]$name, "Bob")
+ expect_equal(arr$arrcol[[1]][[2]]$name, "Alice")
})
test_that("column binary mathfunctions", {