author     Reynold Xin <rxin@databricks.com>    2015-04-13 09:29:04 -0700
committer  Reynold Xin <rxin@databricks.com>    2015-04-13 09:29:04 -0700
commit     c5b0b296b842926b5c07531a5affe8984bc799c5 (patch)
tree       a01bcfeee05deb3c01436835a0263b84d9f03880 /yarn
parent     77620be76e82b6cdaae406cd752d3272656f5fe0 (diff)
[SPARK-6765] Enable scalastyle on test code.
Turn scalastyle on for all test code. Most of the violations have been resolved in my previous pull requests:

Core: https://github.com/apache/spark/pull/5484
SQL: https://github.com/apache/spark/pull/5412
MLlib: https://github.com/apache/spark/pull/5411
GraphX: https://github.com/apache/spark/pull/5410
Streaming: https://github.com/apache/spark/pull/5409

Author: Reynold Xin <rxin@databricks.com>

Closes #5486 from rxin/test-style-enable and squashes the following commits:

01683de [Reynold Xin] Fixed new code.
a4ab46e [Reynold Xin] Fixed tests.
20adbc8 [Reynold Xin] Missed one violation.
5e36521 [Reynold Xin] [SPARK-6765] Enable scalastyle on test code.
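The yarn-side fixes below follow two recurring patterns: explicit result types on definitions that previously relied on inference, and long lines wrapped to satisfy the line-length check. A minimal Scala sketch of the before/after shape, with illustrative names rather than code taken from the diff:

import scala.collection.mutable.{HashMap => MutableHashMap}

object ScalastyleFixSketch {
  // Before (flagged): result type left to inference, body on one long line.
  // def classpath(env: MutableHashMap[String, String]) = env("CLASSPATH").split(":|;|<CPS>")

  // After: explicit result type, body wrapped under the line-length limit.
  def classpath(env: MutableHashMap[String, String]): Array[String] =
    env("CLASSPATH").split(":|;|<CPS>")

  // Overrides get the same treatment, as in MockSplitInfo.equals further down.
  class AlwaysDistinct {
    override def equals(other: Any): Boolean = false
  }
}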
Diffstat (limited to 'yarn')
-rw-r--r--  yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala              | 19
-rw-r--r--  yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala       |  6
-rw-r--r--  yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtilSuite.scala |  2
3 files changed, 18 insertions(+), 9 deletions(-)
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
index 92f04b4b85..c1b94ac9c5 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
@@ -232,19 +232,26 @@ class ClientSuite extends FunSuite with Matchers with BeforeAndAfterAll {
testCode(conf)
}
- def newEnv = MutableHashMap[String, String]()
+ def newEnv: MutableHashMap[String, String] = MutableHashMap[String, String]()
- def classpath(env: MutableHashMap[String, String]) = env(Environment.CLASSPATH.name).split(":|;|<CPS>")
+ def classpath(env: MutableHashMap[String, String]): Array[String] =
+ env(Environment.CLASSPATH.name).split(":|;|<CPS>")
- def flatten(a: Option[Seq[String]], b: Option[Seq[String]]) = (a ++ b).flatten.toArray
+ def flatten(a: Option[Seq[String]], b: Option[Seq[String]]): Array[String] =
+ (a ++ b).flatten.toArray
- def getFieldValue[A, B](clazz: Class[_], field: String, defaults: => B)(mapTo: A => B): B =
- Try(clazz.getField(field)).map(_.get(null).asInstanceOf[A]).toOption.map(mapTo).getOrElse(defaults)
+ def getFieldValue[A, B](clazz: Class[_], field: String, defaults: => B)(mapTo: A => B): B = {
+ Try(clazz.getField(field))
+ .map(_.get(null).asInstanceOf[A])
+ .toOption
+ .map(mapTo)
+ .getOrElse(defaults)
+ }
def getFieldValue2[A: ClassTag, A1: ClassTag, B](
clazz: Class[_],
field: String,
- defaults: => B)(mapTo: A => B)(mapTo1: A1 => B) : B = {
+ defaults: => B)(mapTo: A => B)(mapTo1: A1 => B): B = {
Try(clazz.getField(field)).map(_.get(null)).map {
case v: A => mapTo(v)
case v1: A1 => mapTo1(v1)
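An aside on the getFieldValue2 context lines above: the `case v: A` and `case v1: A1` patterns only discriminate at runtime because both type parameters carry ClassTag context bounds; without them, erasure would leave the pattern matcher nothing to check. A self-contained sketch of that mechanism, with hypothetical names not taken from the suite:

import scala.reflect.ClassTag

object ClassTagDispatchSketch {
  // With ClassTags in scope, the pattern matcher can test the erased runtime
  // class of `value`, so matching on a type parameter is meaningful.
  def dispatch[A: ClassTag, B: ClassTag](value: Any): String =
    value match {
      case a: A => s"A: $a"
      case b: B => s"B: $b"
      case _    => "unhandled"
    }

  def main(args: Array[String]): Unit = {
    println(dispatch[Integer, String](42))      // A: 42
    println(dispatch[Integer, String]("yarn"))  // B: yarn
  }
}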
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala
index c09b01bafc..455f1019d8 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnAllocatorSuite.scala
@@ -79,7 +79,7 @@ class YarnAllocatorSuite extends FunSuite with Matchers with BeforeAndAfterEach
}
class MockSplitInfo(host: String) extends SplitInfo(null, host, null, 1, null) {
- override def equals(other: Any) = false
+ override def equals(other: Any): Boolean = false
}
def createAllocator(maxExecutors: Int = 5): YarnAllocator = {
@@ -118,7 +118,9 @@ class YarnAllocatorSuite extends FunSuite with Matchers with BeforeAndAfterEach
handler.getNumExecutorsRunning should be (1)
handler.allocatedContainerToHostMap.get(container.getId).get should be ("host1")
handler.allocatedHostToContainersMap.get("host1").get should contain (container.getId)
- rmClient.getMatchingRequests(container.getPriority, "host1", containerResource).size should be (0)
+
+ val size = rmClient.getMatchingRequests(container.getPriority, "host1", containerResource).size
+ size should be (0)
}
test("some containers allocated") {
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtilSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtilSuite.scala
index 4194f36499..9395316b71 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtilSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtilSuite.scala
@@ -46,7 +46,7 @@ class YarnSparkHadoopUtilSuite extends FunSuite with Matchers with Logging {
logWarning("Cannot execute bash, skipping bash tests.")
}
- def bashTest(name: String)(fn: => Unit) =
+ def bashTest(name: String)(fn: => Unit): Unit =
if (hasBash) test(name)(fn) else ignore(name)(fn)
bashTest("shell script escaping") {