author     Marcelo Vanzin <vanzin@cloudera.com>   2015-09-15 10:45:02 -0700
committer  Marcelo Vanzin <vanzin@cloudera.com>   2015-09-15 10:45:02 -0700
commit     8abef21dac1a6538c4e4e0140323b83d804d602b (patch)
tree       f95a782626def50ae2977ffc9c63057daf1d9afa
parent     c35fdcb7e9c01271ce560dba4e0bd37569c8f5d1 (diff)
[SPARK-10300] [BUILD] [TESTS] Add support for test tags in run-tests.py.
This change does two things:

- Tags a few tests and adds the mechanism in the build to disable those tags,
  both in Maven and sbt, for both JUnit and ScalaTest suites.
- Adds some logic to run-tests.py to disable certain tags depending on which
  files have changed; this is used to skip expensive tests when a module hasn't
  explicitly been changed, speeding up testing for changes that don't directly
  affect those modules.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #8437 from vanzin/test-tags.
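For illustration only, here is a minimal Python sketch (not part of the patch; the Module class is trimmed down and only the sql, yarn, and core modules are modeled) of how the new determine_tags_to_exclude logic in dev/run-tests.py derives the -Dtest.exclude.tags flag from the set of changed modules:

    # Simplified sketch of the tag-exclusion logic added to dev/run-tests.py.
    # The real Module class lives in dev/sparktestsupport/modules.py; this version
    # keeps only the fields needed to show the idea.
    class Module(object):
        def __init__(self, name, test_tags=()):
            self.name = name
            self.test_tags = list(test_tags)

    all_modules = [
        Module("sql", test_tags=["org.apache.spark.sql.hive.ExtendedHiveTest"]),
        Module("yarn", test_tags=["org.apache.spark.deploy.yarn.ExtendedYarnTest"]),
        Module("core"),
    ]

    def determine_tags_to_exclude(changed_modules):
        # Any tag owned by a module that was not touched gets excluded.
        tags = []
        for m in all_modules:
            if m not in changed_modules:
                tags += m.test_tags
        return tags

    # A change touching only "core" leaves sql and yarn unchanged, so both
    # extended-test tags are excluded and passed to the build:
    changed = [m for m in all_modules if m.name == "core"]
    print("-Dtest.exclude.tags=" + ",".join(determine_tags_to_exclude(changed)))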
-rw-r--r--  core/pom.xml | 10
-rwxr-xr-x  dev/run-tests.py | 19
-rw-r--r--  dev/sparktestsupport/modules.py | 24
-rw-r--r--  external/flume/pom.xml | 10
-rw-r--r--  external/kafka/pom.xml | 10
-rw-r--r--  external/mqtt/pom.xml | 10
-rw-r--r--  external/twitter/pom.xml | 10
-rw-r--r--  external/zeromq/pom.xml | 10
-rw-r--r--  extras/java8-tests/pom.xml | 10
-rw-r--r--  extras/kinesis-asl/pom.xml | 5
-rw-r--r--  launcher/pom.xml | 5
-rw-r--r--  mllib/pom.xml | 10
-rw-r--r--  network/common/pom.xml | 10
-rw-r--r--  network/shuffle/pom.xml | 10
-rw-r--r--  pom.xml | 17
-rw-r--r--  project/SparkBuild.scala | 13
-rw-r--r--  sql/core/pom.xml | 5
-rw-r--r--  sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala | 2
-rw-r--r--  sql/hive/pom.xml | 5
-rw-r--r--  sql/hive/src/test/java/org/apache/spark/sql/hive/ExtendedHiveTest.java | 26
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala | 2
-rw-r--r--  streaming/pom.xml | 10
-rw-r--r--  unsafe/pom.xml | 10
-rw-r--r--  yarn/src/test/java/org/apache/spark/deploy/yarn/ExtendedYarnTest.java | 26
-rw-r--r--  yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala | 1
-rw-r--r--  yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala | 1
26 files changed, 124 insertions, 147 deletions
diff --git a/core/pom.xml b/core/pom.xml
index e31d90f608..8a20181096 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -332,16 +332,6 @@
<scope>test</scope>
</dependency>
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.novocode</groupId>
- <artifactId>junit-interface</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-test</artifactId>
<scope>test</scope>
diff --git a/dev/run-tests.py b/dev/run-tests.py
index d8b22e1665..1a81658518 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -118,6 +118,14 @@ def determine_modules_to_test(changed_modules):
return modules_to_test.union(set(changed_modules))
+def determine_tags_to_exclude(changed_modules):
+ tags = []
+ for m in modules.all_modules:
+ if m not in changed_modules:
+ tags += m.test_tags
+ return tags
+
+
# -------------------------------------------------------------------------------------------------
# Functions for working with subprocesses and shell tools
# -------------------------------------------------------------------------------------------------
@@ -369,6 +377,7 @@ def detect_binary_inop_with_mima():
def run_scala_tests_maven(test_profiles):
mvn_test_goals = ["test", "--fail-at-end"]
+
profiles_and_goals = test_profiles + mvn_test_goals
print("[info] Running Spark tests using Maven with these arguments: ",
@@ -392,7 +401,7 @@ def run_scala_tests_sbt(test_modules, test_profiles):
exec_sbt(profiles_and_goals)
-def run_scala_tests(build_tool, hadoop_version, test_modules):
+def run_scala_tests(build_tool, hadoop_version, test_modules, excluded_tags):
"""Function to properly execute all tests passed in as a set from the
`determine_test_suites` function"""
set_title_and_block("Running Spark unit tests", "BLOCK_SPARK_UNIT_TESTS")
@@ -401,6 +410,10 @@ def run_scala_tests(build_tool, hadoop_version, test_modules):
test_profiles = get_hadoop_profiles(hadoop_version) + \
list(set(itertools.chain.from_iterable(m.build_profile_flags for m in test_modules)))
+
+ if excluded_tags:
+ test_profiles += ['-Dtest.exclude.tags=' + ",".join(excluded_tags)]
+
if build_tool == "maven":
run_scala_tests_maven(test_profiles)
else:
@@ -500,8 +513,10 @@ def main():
target_branch = os.environ["ghprbTargetBranch"]
changed_files = identify_changed_files_from_git_commits("HEAD", target_branch=target_branch)
changed_modules = determine_modules_for_files(changed_files)
+ excluded_tags = determine_tags_to_exclude(changed_modules)
if not changed_modules:
changed_modules = [modules.root]
+ excluded_tags = []
print("[info] Found the following changed modules:",
", ".join(x.name for x in changed_modules))
@@ -541,7 +556,7 @@ def main():
detect_binary_inop_with_mima()
# run the test suites
- run_scala_tests(build_tool, hadoop_version, test_modules)
+ run_scala_tests(build_tool, hadoop_version, test_modules, excluded_tags)
modules_with_python_tests = [m for m in test_modules if m.python_test_goals]
if modules_with_python_tests:
diff --git a/dev/sparktestsupport/modules.py b/dev/sparktestsupport/modules.py
index 346452f317..65397f1f3e 100644
--- a/dev/sparktestsupport/modules.py
+++ b/dev/sparktestsupport/modules.py
@@ -31,7 +31,7 @@ class Module(object):
def __init__(self, name, dependencies, source_file_regexes, build_profile_flags=(), environ={},
sbt_test_goals=(), python_test_goals=(), blacklisted_python_implementations=(),
- should_run_r_tests=False):
+ test_tags=(), should_run_r_tests=False):
"""
Define a new module.
@@ -50,6 +50,8 @@ class Module(object):
:param blacklisted_python_implementations: A set of Python implementations that are not
supported by this module's Python components. The values in this set should match
strings returned by Python's `platform.python_implementation()`.
+ :param test_tags: A set of tags that will be excluded when running unit tests if the module
+ is not explicitly changed.
:param should_run_r_tests: If true, changes in this module will trigger all R tests.
"""
self.name = name
@@ -60,6 +62,7 @@ class Module(object):
self.environ = environ
self.python_test_goals = python_test_goals
self.blacklisted_python_implementations = blacklisted_python_implementations
+ self.test_tags = test_tags
self.should_run_r_tests = should_run_r_tests
self.dependent_modules = set()
@@ -85,6 +88,9 @@ sql = Module(
"catalyst/test",
"sql/test",
"hive/test",
+ ],
+ test_tags=[
+ "org.apache.spark.sql.hive.ExtendedHiveTest"
]
)
@@ -398,6 +404,22 @@ ec2 = Module(
)
+yarn = Module(
+ name="yarn",
+ dependencies=[],
+ source_file_regexes=[
+ "yarn/",
+ "network/yarn/",
+ ],
+ sbt_test_goals=[
+ "yarn/test",
+ "network-yarn/test",
+ ],
+ test_tags=[
+ "org.apache.spark.deploy.yarn.ExtendedYarnTest"
+ ]
+)
+
# The root module is a dummy module which is used to run all of the tests.
# No other modules should directly depend on this module.
root = Module(
diff --git a/external/flume/pom.xml b/external/flume/pom.xml
index 132062f94f..3154e36c21 100644
--- a/external/flume/pom.xml
+++ b/external/flume/pom.xml
@@ -66,16 +66,6 @@
<artifactId>scalacheck_${scala.binary.version}</artifactId>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.novocode</groupId>
- <artifactId>junit-interface</artifactId>
- <scope>test</scope>
- </dependency>
</dependencies>
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
diff --git a/external/kafka/pom.xml b/external/kafka/pom.xml
index 05abd9e2e6..7d0d46dadc 100644
--- a/external/kafka/pom.xml
+++ b/external/kafka/pom.xml
@@ -86,16 +86,6 @@
<artifactId>scalacheck_${scala.binary.version}</artifactId>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.novocode</groupId>
- <artifactId>junit-interface</artifactId>
- <scope>test</scope>
- </dependency>
</dependencies>
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
diff --git a/external/mqtt/pom.xml b/external/mqtt/pom.xml
index 05e6338a08..913c47d33f 100644
--- a/external/mqtt/pom.xml
+++ b/external/mqtt/pom.xml
@@ -59,16 +59,6 @@
<scope>test</scope>
</dependency>
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.novocode</groupId>
- <artifactId>junit-interface</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
<groupId>org.apache.activemq</groupId>
<artifactId>activemq-core</artifactId>
<version>5.7.0</version>
diff --git a/external/twitter/pom.xml b/external/twitter/pom.xml
index 244ad58ae9..9137bf25ee 100644
--- a/external/twitter/pom.xml
+++ b/external/twitter/pom.xml
@@ -58,16 +58,6 @@
<artifactId>scalacheck_${scala.binary.version}</artifactId>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.novocode</groupId>
- <artifactId>junit-interface</artifactId>
- <scope>test</scope>
- </dependency>
</dependencies>
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
diff --git a/external/zeromq/pom.xml b/external/zeromq/pom.xml
index 171df8682c..6fec4f0e8a 100644
--- a/external/zeromq/pom.xml
+++ b/external/zeromq/pom.xml
@@ -57,16 +57,6 @@
<artifactId>scalacheck_${scala.binary.version}</artifactId>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.novocode</groupId>
- <artifactId>junit-interface</artifactId>
- <scope>test</scope>
- </dependency>
</dependencies>
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
diff --git a/extras/java8-tests/pom.xml b/extras/java8-tests/pom.xml
index 81794a8536..dba3dda8a9 100644
--- a/extras/java8-tests/pom.xml
+++ b/extras/java8-tests/pom.xml
@@ -58,16 +58,6 @@
<type>test-jar</type>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.novocode</groupId>
- <artifactId>junit-interface</artifactId>
- <scope>test</scope>
- </dependency>
</dependencies>
<profiles>
diff --git a/extras/kinesis-asl/pom.xml b/extras/kinesis-asl/pom.xml
index 6dd8ff69c2..760f183a2e 100644
--- a/extras/kinesis-asl/pom.xml
+++ b/extras/kinesis-asl/pom.xml
@@ -74,11 +74,6 @@
<artifactId>scalacheck_${scala.binary.version}</artifactId>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>com.novocode</groupId>
- <artifactId>junit-interface</artifactId>
- <scope>test</scope>
- </dependency>
</dependencies>
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
diff --git a/launcher/pom.xml b/launcher/pom.xml
index ed38e66aa2..80696280a1 100644
--- a/launcher/pom.xml
+++ b/launcher/pom.xml
@@ -43,11 +43,6 @@
<scope>test</scope>
</dependency>
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
diff --git a/mllib/pom.xml b/mllib/pom.xml
index 22c0c6008b..5dedacb388 100644
--- a/mllib/pom.xml
+++ b/mllib/pom.xml
@@ -95,16 +95,6 @@
<scope>test</scope>
</dependency>
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.novocode</groupId>
- <artifactId>junit-interface</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
diff --git a/network/common/pom.xml b/network/common/pom.xml
index 1cc054a893..9c12cca0df 100644
--- a/network/common/pom.xml
+++ b/network/common/pom.xml
@@ -65,16 +65,6 @@
<!-- Test dependencies -->
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.novocode</groupId>
- <artifactId>junit-interface</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<scope>test</scope>
diff --git a/network/shuffle/pom.xml b/network/shuffle/pom.xml
index 7a66c96804..e4f4c57b68 100644
--- a/network/shuffle/pom.xml
+++ b/network/shuffle/pom.xml
@@ -79,16 +79,6 @@
<scope>test</scope>
</dependency>
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.novocode</groupId>
- <artifactId>junit-interface</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<scope>test</scope>
diff --git a/pom.xml b/pom.xml
index 6535994641..2927d3e107 100644
--- a/pom.xml
+++ b/pom.xml
@@ -181,6 +181,7 @@
<libthrift.version>0.9.2</libthrift.version>
<test.java.home>${java.home}</test.java.home>
+ <test.exclude.tags></test.exclude.tags>
<!--
Dependency scopes that can be overridden by enabling certain profiles. These profiles are
@@ -339,6 +340,16 @@
<artifactId>scalatest_${scala.binary.version}</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.novocode</groupId>
+ <artifactId>junit-interface</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<dependencyManagement>
<dependencies>
@@ -742,7 +753,7 @@
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
- <version>4.10</version>
+ <version>4.11</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -760,7 +771,7 @@
<dependency>
<groupId>com.novocode</groupId>
<artifactId>junit-interface</artifactId>
- <version>0.10</version>
+ <version>0.11</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -1915,6 +1926,7 @@
<test.src.tables>src</test.src.tables>
</systemProperties>
<failIfNoTests>false</failIfNoTests>
+ <excludedGroups>${test.exclude.tags}</excludedGroups>
</configuration>
</plugin>
<!-- Scalatest runs all Scala tests -->
@@ -1952,6 +1964,7 @@
<!-- Needed by sql/hive tests. -->
<test.src.tables>__not_used__</test.src.tables>
</systemProperties>
+ <tagsToExclude>${test.exclude.tags}</tagsToExclude>
</configuration>
<executions>
<execution>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 901cfa538d..d80d300f1c 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -567,11 +567,20 @@ object TestSettings {
javaOptions in Test ++= "-Xmx3g -Xss4096k -XX:PermSize=128M -XX:MaxNewSize=256m -XX:MaxPermSize=1g"
.split(" ").toSeq,
javaOptions += "-Xmx3g",
+ // Exclude tags defined in a system property
+ testOptions in Test += Tests.Argument(TestFrameworks.ScalaTest,
+ sys.props.get("test.exclude.tags").map { tags =>
+ tags.split(",").flatMap { tag => Seq("-l", tag) }.toSeq
+ }.getOrElse(Nil): _*),
+ testOptions in Test += Tests.Argument(TestFrameworks.JUnit,
+ sys.props.get("test.exclude.tags").map { tags =>
+ Seq("--exclude-categories=" + tags)
+ }.getOrElse(Nil): _*),
// Show full stack trace and duration in test cases.
testOptions in Test += Tests.Argument("-oDF"),
- testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
+ testOptions in Test += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
// Enable Junit testing.
- libraryDependencies += "com.novocode" % "junit-interface" % "0.9" % "test",
+ libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % "test",
// Only allow one test at a time, even across projects, since they run in the same JVM
parallelExecution in Test := false,
// Make sure the test temp directory exists.
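As a rough illustration (a Python sketch, not part of the patch) of the argument translation in the SparkBuild.scala hunk above: the comma-separated test.exclude.tags property becomes repeated "-l <tag>" arguments for ScalaTest and a single "--exclude-categories=<tags>" argument for the junit-interface runner.

    # Sketch of how a comma-separated test.exclude.tags value maps to the
    # per-framework arguments built in SparkBuild.scala above.
    def scalatest_exclude_args(tags):
        args = []
        for tag in tags.split(","):
            args += ["-l", tag]  # ScalaTest: skip anything tagged with <tag>
        return args

    def junit_exclude_args(tags):
        # junit-interface takes the excluded categories as a single argument.
        return ["--exclude-categories=" + tags]

    tags = "org.apache.spark.sql.hive.ExtendedHiveTest,org.apache.spark.deploy.yarn.ExtendedYarnTest"
    print(scalatest_exclude_args(tags))  # ['-l', '...ExtendedHiveTest', '-l', '...ExtendedYarnTest']
    print(junit_exclude_args(tags))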
diff --git a/sql/core/pom.xml b/sql/core/pom.xml
index 465aa3a388..fa6732db18 100644
--- a/sql/core/pom.xml
+++ b/sql/core/pom.xml
@@ -74,11 +74,6 @@
<version>${fasterxml.jackson.version}</version>
</dependency>
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
<groupId>org.scalacheck</groupId>
<artifactId>scalacheck_${scala.binary.version}</artifactId>
<scope>test</scope>
diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
index ab309e0a1d..ffc4c32794 100644
--- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
+++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveCompatibilitySuite.scala
@@ -24,11 +24,13 @@ import org.apache.spark.sql.catalyst.rules.RuleExecutor
import org.scalatest.BeforeAndAfter
import org.apache.spark.sql.SQLConf
+import org.apache.spark.sql.hive.ExtendedHiveTest
import org.apache.spark.sql.hive.test.TestHive
/**
* Runs the test cases that are included in the hive distribution.
*/
+@ExtendedHiveTest
class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
// TODO: bundle in jar files... get from classpath
private lazy val hiveQueryDir = TestHive.getHiveFile(
diff --git a/sql/hive/pom.xml b/sql/hive/pom.xml
index ac67fe5f47..82cfeb2bb9 100644
--- a/sql/hive/pom.xml
+++ b/sql/hive/pom.xml
@@ -161,11 +161,6 @@
<scope>test</scope>
</dependency>
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<type>test-jar</type>
diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/ExtendedHiveTest.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/ExtendedHiveTest.java
new file mode 100644
index 0000000000..e2183183fb
--- /dev/null
+++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/ExtendedHiveTest.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.hive;
+
+import java.lang.annotation.*;
+import org.scalatest.TagAnnotation;
+
+@TagAnnotation
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.METHOD, ElementType.TYPE})
+public @interface ExtendedHiveTest { }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
index f0bb77092c..888d1b7b45 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
@@ -23,6 +23,7 @@ import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{Logging, SparkFunSuite}
import org.apache.spark.sql.catalyst.expressions.{NamedExpression, Literal, AttributeReference, EqualTo}
import org.apache.spark.sql.catalyst.util.quietly
+import org.apache.spark.sql.hive.ExtendedHiveTest
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.util.Utils
@@ -32,6 +33,7 @@ import org.apache.spark.util.Utils
* sure that reflective calls are not throwing NoSuchMethod error, but the actual functionality
* is not fully tested.
*/
+@ExtendedHiveTest
class VersionsSuite extends SparkFunSuite with Logging {
// Do not use a temp path here to speed up subsequent executions of the unit test during
diff --git a/streaming/pom.xml b/streaming/pom.xml
index 5cc9001b0e..1e6ee009ca 100644
--- a/streaming/pom.xml
+++ b/streaming/pom.xml
@@ -85,20 +85,10 @@
<scope>test</scope>
</dependency>
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
<groupId>org.seleniumhq.selenium</groupId>
<artifactId>selenium-java</artifactId>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>com.novocode</groupId>
- <artifactId>junit-interface</artifactId>
- <scope>test</scope>
- </dependency>
</dependencies>
<build>
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
diff --git a/unsafe/pom.xml b/unsafe/pom.xml
index 066abe92e5..4e8b9a84bb 100644
--- a/unsafe/pom.xml
+++ b/unsafe/pom.xml
@@ -56,16 +56,6 @@
<!-- Test dependencies -->
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.novocode</groupId>
- <artifactId>junit-interface</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
diff --git a/yarn/src/test/java/org/apache/spark/deploy/yarn/ExtendedYarnTest.java b/yarn/src/test/java/org/apache/spark/deploy/yarn/ExtendedYarnTest.java
new file mode 100644
index 0000000000..7a8f2fe979
--- /dev/null
+++ b/yarn/src/test/java/org/apache/spark/deploy/yarn/ExtendedYarnTest.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.deploy.yarn;
+
+import java.lang.annotation.*;
+import org.scalatest.TagAnnotation;
+
+@TagAnnotation
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.METHOD, ElementType.TYPE})
+public @interface ExtendedYarnTest { }
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index b5a42fd6af..105c3090d4 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -39,6 +39,7 @@ import org.apache.spark.util.Utils
* applications, and require the Spark assembly to be built before they can be successfully
* run.
*/
+@ExtendedYarnTest
class YarnClusterSuite extends BaseYarnClusterSuite {
override def newYarnConfig(): YarnConfiguration = new YarnConfiguration()
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala
index 8d9c9b3004..4700e2428d 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala
@@ -32,6 +32,7 @@ import org.apache.spark.network.yarn.{YarnShuffleService, YarnTestAccessor}
/**
* Integration test for the external shuffle service with a yarn mini-cluster
*/
+@ExtendedYarnTest
class YarnShuffleIntegrationSuite extends BaseYarnClusterSuite {
override def newYarnConfig(): YarnConfiguration = {