From 27209252f09ff73c58e60c6df8aaba73b308088c Mon Sep 17 00:00:00 2001
From: Xin Ren
Date: Tue, 30 Aug 2016 11:24:55 +0100
Subject: [MINOR][MLLIB][SQL] Clean up unused variables and unused import

## What changes were proposed in this pull request?

Clean up unused variables and unused import statements, remove an unnecessary
`return` and a redundant `toArray`, and make a few more style improvements
noticed while walking through the code examples. (A short, hypothetical sketch
of these idioms follows the patch below.)

## How was this patch tested?

Tested manually on a local laptop.

Author: Xin Ren

Closes #14836 from keypointt/codeWalkThroughML.
---
 .../src/main/scala/org/apache/spark/ml/feature/Interaction.scala  | 2 +-
 .../scala/org/apache/spark/ml/r/IsotonicRegressionWrapper.scala   | 2 +-
 mllib/src/main/scala/org/apache/spark/ml/util/stopwatches.scala   | 2 +-
 .../main/scala/org/apache/spark/mllib/feature/ChiSqSelector.scala | 2 +-
 .../src/main/scala/org/apache/spark/mllib/random/RandomRDDs.scala | 8 ++++----
 5 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/mllib/src/main/scala/org/apache/spark/ml/feature/Interaction.scala b/mllib/src/main/scala/org/apache/spark/ml/feature/Interaction.scala
index 96d0bdee9e..902f84f862 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/feature/Interaction.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/feature/Interaction.scala
@@ -136,7 +136,7 @@ class Interaction @Since("1.6.0") (@Since("1.6.0") override val uid: String) ext
         case _: VectorUDT =>
           val attrs = AttributeGroup.fromStructField(f).attributes.getOrElse(
             throw new SparkException("Vector attributes must be defined for interaction."))
-          attrs.map(getNumFeatures).toArray
+          attrs.map(getNumFeatures)
       }
       new FeatureEncoder(numFeatures)
     }.toArray
diff --git a/mllib/src/main/scala/org/apache/spark/ml/r/IsotonicRegressionWrapper.scala b/mllib/src/main/scala/org/apache/spark/ml/r/IsotonicRegressionWrapper.scala
index 1ea80cb46a..a7992debe6 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/r/IsotonicRegressionWrapper.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/r/IsotonicRegressionWrapper.scala
@@ -23,7 +23,7 @@ import org.json4s.JsonDSL._
 import org.json4s.jackson.JsonMethods._
 
 import org.apache.spark.ml.{Pipeline, PipelineModel}
-import org.apache.spark.ml.attribute.{AttributeGroup}
+import org.apache.spark.ml.attribute.AttributeGroup
 import org.apache.spark.ml.feature.RFormula
 import org.apache.spark.ml.regression.{IsotonicRegression, IsotonicRegressionModel}
 import org.apache.spark.ml.util._
diff --git a/mllib/src/main/scala/org/apache/spark/ml/util/stopwatches.scala b/mllib/src/main/scala/org/apache/spark/ml/util/stopwatches.scala
index e79b1f3164..e539deca4b 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/util/stopwatches.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/util/stopwatches.scala
@@ -20,7 +20,7 @@ package org.apache.spark.ml.util
 import scala.collection.mutable
 
 import org.apache.spark.SparkContext
-import org.apache.spark.util.LongAccumulator;
+import org.apache.spark.util.LongAccumulator
 
 /**
  * Abstract class for stopwatches.
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/feature/ChiSqSelector.scala b/mllib/src/main/scala/org/apache/spark/mllib/feature/ChiSqSelector.scala
index 56fb2d33c2..33a1f18bcc 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/feature/ChiSqSelector.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/feature/ChiSqSelector.scala
@@ -164,7 +164,7 @@ object ChiSqSelectorModel extends Loader[ChiSqSelectorModel] {
         case Row(feature: Int) => (feature)
       }.collect()
 
-      return new ChiSqSelectorModel(features)
+      new ChiSqSelectorModel(features)
     }
   }
 }
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/random/RandomRDDs.scala b/mllib/src/main/scala/org/apache/spark/mllib/random/RandomRDDs.scala
index c2bc1f17cc..6d60136ddc 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/random/RandomRDDs.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/random/RandomRDDs.scala
@@ -438,10 +438,10 @@ object RandomRDDs {
   @DeveloperApi
   @Since("1.6.0")
   def randomJavaRDD[T](
-    jsc: JavaSparkContext,
-    generator: RandomDataGenerator[T],
-    size: Long): JavaRDD[T] = {
-    randomJavaRDD(jsc, generator, size, 0);
+      jsc: JavaSparkContext,
+      generator: RandomDataGenerator[T],
+      size: Long): JavaRDD[T] = {
+    randomJavaRDD(jsc, generator, size, 0)
   }
 
   // TODO Generate RDD[Vector] from multivariate distributions.
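The changes in this patch are style cleanups rather than behavioural fixes. The snippet below is a minimal, self-contained sketch of the Scala idioms involved; it is not Spark code, and `CleanupIdioms`, `SelectorModel`, and the string attributes are invented purely for illustration.

```scala
// Hypothetical sketch of the idioms behind this cleanup (not Spark code).
object CleanupIdioms {

  final case class SelectorModel(selectedFeatures: Array[Int])

  // Before: an explicit `return` (and a trailing semicolon) is legal but unidiomatic Scala.
  def loadVerbose(features: Array[Int]): SelectorModel = {
    return new SelectorModel(features);
  }

  // After: the last expression of a method is its result, so `return` is unnecessary.
  def load(features: Array[Int]): SelectorModel = new SelectorModel(features)

  // Before: mapping over an Array already yields an Array, so `.toArray` only makes an extra copy.
  def numFeaturesVerbose(attrs: Array[String]): Array[Int] = attrs.map(_.length).toArray

  // After: drop the redundant conversion.
  def numFeatures(attrs: Array[String]): Array[Int] = attrs.map(_.length)

  def main(args: Array[String]): Unit = {
    println(load(Array(1, 2, 3)).selectedFeatures.mkString(","))  // prints 1,2,3
    println(numFeatures(Array("alpha", "beta")).mkString(","))    // prints 5,4
  }
}
```

The import and whitespace changes follow the same spirit: a single import such as `import org.apache.spark.ml.attribute.{AttributeGroup}` needs no braces, unused imports and stray semicolons are simply dropped, and continuation lines of multi-line method declarations are indented per the Spark Scala style guide.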