author     hyukjinkwon <gurwls223@gmail.com>    2017-04-17 09:04:24 -0700
committer  Felix Cheung <felixcheung@apache.org>    2017-04-17 09:04:24 -0700
commit     24f09b39c7b947e52fda952676d5114c2540e732 (patch)
tree       c44e855041a0cea70c4fccc05627945acfce2a56
parent     86d251c58591278a7c88745a1049e7a41db11964 (diff)
[SPARK-19828][R][FOLLOWUP] Rename asJsonArray to as.json.array in from_json function in R
## What changes were proposed in this pull request?

This was originally suggested as `as.json.array` in the PR for SPARK-19828, but we could not use that name because the lint check emitted an error for multiple dots in variable names. After SPARK-20278, `multiple.dots.in.names` are now allowed. The `asJsonArray` argument of the `from_json` function can still be renamed because 2.2 has not been released yet, so this PR renames `asJsonArray` to `as.json.array`.

## How was this patch tested?

Jenkins tests, local tests with `./R/run-tests.sh`, and a manual `./dev/lint-r`. Existing tests should cover this.

Author: hyukjinkwon <gurwls223@gmail.com>

Closes #17653 from HyukjinKwon/SPARK-19828-followup.
-rw-r--r--  R/pkg/R/functions.R                        8
-rw-r--r--  R/pkg/inst/tests/testthat/test_sparkSQL.R  2
2 files changed, 5 insertions(+), 5 deletions(-)
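For context, a minimal usage sketch of the renamed argument (not part of the patch): it assumes a local SparkR session on Spark 2.2 or later, i.e. after this rename, and the data simply mirrors the updated test below.

```r
# Sketch only: assumes SparkR is installed and a local Spark 2.2+ session can start.
library(SparkR)
sparkR.session(master = "local[1]")

# A single column holding a JSON array of objects, as in the test case below.
jsonArr <- "[{\"name\":\"Bob\"}, {\"name\":\"Alice\"}]"
df <- as.DataFrame(list(list(people = jsonArr)))
schema <- structType(structField("name", "string"))

# Old argument name:  from_json(df$people, schema, asJsonArray = TRUE)
# New argument name, permitted since the lint rule was relaxed in SPARK-20278:
arr <- collect(select(df, alias(from_json(df$people, schema, as.json.array = TRUE), "arrcol")))
str(arr$arrcol)  # one row containing the list of parsed structs

sparkR.session.stop()
```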
diff --git a/R/pkg/R/functions.R b/R/pkg/R/functions.R
index 449476dec5..c311921fb3 100644
--- a/R/pkg/R/functions.R
+++ b/R/pkg/R/functions.R
@@ -2438,12 +2438,12 @@ setMethod("date_format", signature(y = "Column", x = "character"),
#' from_json
#'
#' Parses a column containing a JSON string into a Column of \code{structType} with the specified
-#' \code{schema} or array of \code{structType} if \code{asJsonArray} is set to \code{TRUE}.
+#' \code{schema} or array of \code{structType} if \code{as.json.array} is set to \code{TRUE}.
#' If the string is unparseable, the Column will contain the value NA.
#'
#' @param x Column containing the JSON string.
#' @param schema a structType object to use as the schema to use when parsing the JSON string.
-#' @param asJsonArray indicating if input string is JSON array of objects or a single object.
+#' @param as.json.array indicating if input string is JSON array of objects or a single object.
#' @param ... additional named properties to control how the json is parsed, accepts the same
#' options as the JSON data source.
#'
@@ -2459,8 +2459,8 @@ setMethod("date_format", signature(y = "Column", x = "character"),
#'}
#' @note from_json since 2.2.0
setMethod("from_json", signature(x = "Column", schema = "structType"),
- function(x, schema, asJsonArray = FALSE, ...) {
- if (asJsonArray) {
+ function(x, schema, as.json.array = FALSE, ...) {
+ if (as.json.array) {
jschema <- callJStatic("org.apache.spark.sql.types.DataTypes",
"createArrayType",
schema$jobj)
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R
index 3fbb618ddf..6a6c9a809a 100644
--- a/R/pkg/inst/tests/testthat/test_sparkSQL.R
+++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R
@@ -1454,7 +1454,7 @@ test_that("column functions", {
jsonArr <- "[{\"name\":\"Bob\"}, {\"name\":\"Alice\"}]"
df <- as.DataFrame(list(list("people" = jsonArr)))
schema <- structType(structField("name", "string"))
- arr <- collect(select(df, alias(from_json(df$people, schema, asJsonArray = TRUE), "arrcol")))
+ arr <- collect(select(df, alias(from_json(df$people, schema, as.json.array = TRUE), "arrcol")))
expect_equal(ncol(arr), 1)
expect_equal(nrow(arr), 1)
expect_is(arr[[1]][[1]], "list")