about summary refs log tree commit diff
path: root/examples/src/main/r/data-manipulation.R
diff options
context:
space:
mode:
authorDongjoon Hyun <dongjoon@apache.org>2016-06-16 12:46:25 -0700
committerShivaram Venkataraman <shivaram@cs.berkeley.edu>2016-06-16 12:46:25 -0700
commita865f6e05297f6121bb2fde717860f9edeed263e (patch)
tree874653a7d9736e654e26b996de6006d973038bb7 /examples/src/main/r/data-manipulation.R
parent9ea0d5e326e08b914aa46f1eec8795688a61bf74 (diff)
downloadspark-a865f6e05297f6121bb2fde717860f9edeed263e.tar.gz
spark-a865f6e05297f6121bb2fde717860f9edeed263e.tar.bz2
spark-a865f6e05297f6121bb2fde717860f9edeed263e.zip
[SPARK-15996][R] Fix R examples by removing deprecated functions
## What changes were proposed in this pull request? Currently, R examples(`dataframe.R` and `data-manipulation.R`) fail like the following. We had better update them before releasing 2.0 RC. This PR updates them to use up-to-date APIs. ```bash $ bin/spark-submit examples/src/main/r/dataframe.R ... Warning message: 'createDataFrame(sqlContext...)' is deprecated. Use 'createDataFrame(data, schema = NULL, samplingRatio = 1.0)' instead. See help("Deprecated") ... Warning message: 'read.json(sqlContext...)' is deprecated. Use 'read.json(path)' instead. See help("Deprecated") ... Error: could not find function "registerTempTable" Execution halted ``` ## How was this patch tested? Manual. ``` curl -LO http://s3-us-west-2.amazonaws.com/sparkr-data/flights.csv bin/spark-submit examples/src/main/r/dataframe.R bin/spark-submit examples/src/main/r/data-manipulation.R flights.csv ``` Author: Dongjoon Hyun <dongjoon@apache.org> Closes #13714 from dongjoon-hyun/SPARK-15996.
Diffstat (limited to 'examples/src/main/r/data-manipulation.R')
-rw-r--r-- examples/src/main/r/data-manipulation.R | 8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/examples/src/main/r/data-manipulation.R b/examples/src/main/r/data-manipulation.R
index 58a30135aa..badb98bc78 100644
--- a/examples/src/main/r/data-manipulation.R
+++ b/examples/src/main/r/data-manipulation.R
@@ -49,10 +49,10 @@ flights_df$date <- as.Date(flights_df$date)
SFO_df <- flights_df[flights_df$dest == "SFO", ]
# Convert the local data frame into a SparkDataFrame
-SFO_DF <- createDataFrame(sqlContext, SFO_df)
+SFO_DF <- createDataFrame(SFO_df)
# Directly create a SparkDataFrame from the source data
-flightsDF <- read.df(sqlContext, flightsCsvPath, source = "csv", header = "true")
+flightsDF <- read.df(flightsCsvPath, source = "csv", header = "true")
# Print the schema of this SparkDataFrame
printSchema(flightsDF)
@@ -75,8 +75,8 @@ destDF <- select(flightsDF, "dest", "cancelled")
# Using SQL to select columns of data
# First, register the flights SparkDataFrame as a table
-registerTempTable(flightsDF, "flightsTable")
-destDF <- sql(sqlContext, "SELECT dest, cancelled FROM flightsTable")
+createOrReplaceTempView(flightsDF, "flightsTable")
+destDF <- sql("SELECT dest, cancelled FROM flightsTable")
# Use collect to create a local R data frame
local_df <- collect(destDF)