path: root/mllib/src/test
author     Sean Zhong <seanzhong@databricks.com>    2016-05-18 09:01:59 +0800
committer  Cheng Lian <lian@databricks.com>         2016-05-18 09:01:59 +0800
commit     25b315e6cad7c27b62dcaa2c194293c1115fdfb3 (patch)
tree       cfeebcaf553d78ca80a70f7139a765e7759f0410 /mllib/src/test
parent     b674e67c22bf663334e537e35787c00533adbb04 (diff)
download   spark-25b315e6cad7c27b62dcaa2c194293c1115fdfb3.tar.gz
           spark-25b315e6cad7c27b62dcaa2c194293c1115fdfb3.tar.bz2
           spark-25b315e6cad7c27b62dcaa2c194293c1115fdfb3.zip
[SPARK-15171][SQL] Remove the references to deprecated method dataset.registerTempTable
## What changes were proposed in this pull request?

Update the unit test code, examples, and documentation to remove calls to the deprecated method `dataset.registerTempTable`, replacing them with `dataset.createOrReplaceTempView`.

## How was this patch tested?

This PR only changes unit test code, examples, and comments, so it should be safe.

This is a follow-up of PR https://github.com/apache/spark/pull/12945, which has already been merged.

Author: Sean Zhong <seanzhong@databricks.com>

Closes #13098 from clockfly/spark-15171-remove-deprecation.
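For reference, a minimal standalone sketch of the replacement pattern applied throughout this patch: register a DataFrame as a temporary view with `createOrReplaceTempView` instead of the deprecated `registerTempTable`, then query it through `spark.sql`. The class name, the `local[2]` master, and the `spark.range`-based input are illustrative assumptions and do not appear in the patched test suites.

```java
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class TempViewExample {
  public static void main(String[] args) {
    SparkSession spark = SparkSession.builder()
        .appName("TempViewExample")
        .master("local[2]")  // assumption: local master for a quick test run
        .getOrCreate();

    // Illustrative input; the real suites build DataFrames from LabeledPoint RDDs.
    Dataset<Row> dataset = spark.range(10).toDF("id");

    // Deprecated API removed by this patch:
    //   dataset.registerTempTable("dataset");

    // Replacement: registers (or replaces) a session-scoped temporary view.
    dataset.createOrReplaceTempView("dataset");

    // The view can then be queried with SQL, as the test suites do.
    Dataset<Row> result = spark.sql("SELECT id FROM dataset WHERE id > 5");
    result.show();

    spark.stop();
  }
}
```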
Diffstat (limited to 'mllib/src/test')
-rw-r--r--  mllib/src/test/java/org/apache/spark/ml/JavaPipelineSuite.java                            |  2
-rw-r--r--  mllib/src/test/java/org/apache/spark/ml/classification/JavaLogisticRegressionSuite.java   | 10
-rw-r--r--  mllib/src/test/java/org/apache/spark/ml/regression/JavaLinearRegressionSuite.java         |  4
3 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/mllib/src/test/java/org/apache/spark/ml/JavaPipelineSuite.java b/mllib/src/test/java/org/apache/spark/ml/JavaPipelineSuite.java
index 46c26e8b92..a81a36d1b1 100644
--- a/mllib/src/test/java/org/apache/spark/ml/JavaPipelineSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/JavaPipelineSuite.java
@@ -68,7 +68,7 @@ public class JavaPipelineSuite {
Pipeline pipeline = new Pipeline()
.setStages(new PipelineStage[]{scaler, lr});
PipelineModel model = pipeline.fit(dataset);
- model.transform(dataset).registerTempTable("prediction");
+ model.transform(dataset).createOrReplaceTempView("prediction");
Dataset<Row> predictions = spark.sql("SELECT label, probability, prediction FROM prediction");
predictions.collectAsList();
}
diff --git a/mllib/src/test/java/org/apache/spark/ml/classification/JavaLogisticRegressionSuite.java b/mllib/src/test/java/org/apache/spark/ml/classification/JavaLogisticRegressionSuite.java
index 98abca221c..b8da04c26a 100644
--- a/mllib/src/test/java/org/apache/spark/ml/classification/JavaLogisticRegressionSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/classification/JavaLogisticRegressionSuite.java
@@ -54,7 +54,7 @@ public class JavaLogisticRegressionSuite implements Serializable {
List<LabeledPoint> points = generateLogisticInputAsList(1.0, 1.0, 100, 42);
datasetRDD = jsc.parallelize(points, 2);
dataset = spark.createDataFrame(datasetRDD, LabeledPoint.class);
- dataset.registerTempTable("dataset");
+ dataset.createOrReplaceTempView("dataset");
}
@After
@@ -68,7 +68,7 @@ public class JavaLogisticRegressionSuite implements Serializable {
LogisticRegression lr = new LogisticRegression();
Assert.assertEquals(lr.getLabelCol(), "label");
LogisticRegressionModel model = lr.fit(dataset);
- model.transform(dataset).registerTempTable("prediction");
+ model.transform(dataset).createOrReplaceTempView("prediction");
Dataset<Row> predictions = spark.sql("SELECT label, probability, prediction FROM prediction");
predictions.collectAsList();
// Check defaults
@@ -97,14 +97,14 @@ public class JavaLogisticRegressionSuite implements Serializable {
// Modify model params, and check that the params worked.
model.setThreshold(1.0);
- model.transform(dataset).registerTempTable("predAllZero");
+ model.transform(dataset).createOrReplaceTempView("predAllZero");
Dataset<Row> predAllZero = spark.sql("SELECT prediction, myProbability FROM predAllZero");
for (Row r : predAllZero.collectAsList()) {
Assert.assertEquals(0.0, r.getDouble(0), eps);
}
// Call transform with params, and check that the params worked.
model.transform(dataset, model.threshold().w(0.0), model.probabilityCol().w("myProb"))
- .registerTempTable("predNotAllZero");
+ .createOrReplaceTempView("predNotAllZero");
Dataset<Row> predNotAllZero = spark.sql("SELECT prediction, myProb FROM predNotAllZero");
boolean foundNonZero = false;
for (Row r : predNotAllZero.collectAsList()) {
@@ -130,7 +130,7 @@ public class JavaLogisticRegressionSuite implements Serializable {
LogisticRegressionModel model = lr.fit(dataset);
Assert.assertEquals(2, model.numClasses());
- model.transform(dataset).registerTempTable("transformed");
+ model.transform(dataset).createOrReplaceTempView("transformed");
Dataset<Row> trans1 = spark.sql("SELECT rawPrediction, probability FROM transformed");
for (Row row : trans1.collectAsList()) {
Vector raw = (Vector) row.get(0);
diff --git a/mllib/src/test/java/org/apache/spark/ml/regression/JavaLinearRegressionSuite.java b/mllib/src/test/java/org/apache/spark/ml/regression/JavaLinearRegressionSuite.java
index d3ef5f6fca..126aa6298f 100644
--- a/mllib/src/test/java/org/apache/spark/ml/regression/JavaLinearRegressionSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/regression/JavaLinearRegressionSuite.java
@@ -50,7 +50,7 @@ public class JavaLinearRegressionSuite implements Serializable {
List<LabeledPoint> points = generateLogisticInputAsList(1.0, 1.0, 100, 42);
datasetRDD = jsc.parallelize(points, 2);
dataset = spark.createDataFrame(datasetRDD, LabeledPoint.class);
- dataset.registerTempTable("dataset");
+ dataset.createOrReplaceTempView("dataset");
}
@After
@@ -65,7 +65,7 @@ public class JavaLinearRegressionSuite implements Serializable {
assertEquals("label", lr.getLabelCol());
assertEquals("auto", lr.getSolver());
LinearRegressionModel model = lr.fit(dataset);
- model.transform(dataset).registerTempTable("prediction");
+ model.transform(dataset).createOrReplaceTempView("prediction");
Dataset<Row> predictions = spark.sql("SELECT label, prediction FROM prediction");
predictions.collect();
// Check defaults