author      zsxwing <zsxwing@gmail.com>    2015-07-27 23:34:29 -0700
committer   Tathagata Das <tathagata.das1565@gmail.com>    2015-07-27 23:34:29 -0700
commit      d93ab93d673c5007a1edb90a424b451c91c8a285 (patch)
tree        15c6217c820d7db7ef1ddb8a108d54c8733f41b4 /extras
parent      9c5612f4e197dec82a5eac9542896d6216a866b7 (diff)
[SPARK-9335] [STREAMING] [TESTS] Make sure the test stream is deleted in KinesisBackedBlockRDDSuite
KinesisBackedBlockRDDSuite should make sure to delete the test stream.

Author: zsxwing <zsxwing@gmail.com>

Closes #7663 from zsxwing/fix-SPARK-9335 and squashes the following commits:

f0e9154 [zsxwing] Revert "[HOTFIX] - Disable Kinesis tests due to rate limits"
71a4552 [zsxwing] Make sure the test stream is deleted
Diffstat (limited to 'extras')
-rw-r--r--    extras/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisBackedBlockRDDSuite.scala    7
-rw-r--r--    extras/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisStreamSuite.scala    4
2 files changed, 7 insertions, 4 deletions
diff --git a/extras/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisBackedBlockRDDSuite.scala b/extras/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisBackedBlockRDDSuite.scala
index b2e2a4246d..e81fb11e59 100644
--- a/extras/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisBackedBlockRDDSuite.scala
+++ b/extras/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisBackedBlockRDDSuite.scala
@@ -17,10 +17,10 @@
package org.apache.spark.streaming.kinesis
-import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}
+import org.scalatest.BeforeAndAfterAll
import org.apache.spark.storage.{BlockId, BlockManager, StorageLevel, StreamBlockId}
-import org.apache.spark.{SparkConf, SparkContext, SparkException, SparkFunSuite}
+import org.apache.spark.{SparkConf, SparkContext, SparkException}
class KinesisBackedBlockRDDSuite extends KinesisFunSuite with BeforeAndAfterAll {
@@ -65,6 +65,9 @@ class KinesisBackedBlockRDDSuite extends KinesisFunSuite with BeforeAndAfterAll
}
override def afterAll(): Unit = {
+ if (testUtils != null) {
+ testUtils.deleteStream()
+ }
if (sc != null) {
sc.stop()
}
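
After this patch, the afterAll hook in KinesisBackedBlockRDDSuite looks roughly as follows. This is a sketch reconstructed from the hunk above; the explanatory comments are added here and any code outside the hunk is assumed.

override def afterAll(): Unit = {
  // Delete the Kinesis test stream created by the suite; testUtils may be
  // null if the Kinesis tests were skipped or stream creation failed.
  if (testUtils != null) {
    testUtils.deleteStream()
  }
  // Stop the SparkContext independently of the stream cleanup.
  if (sc != null) {
    sc.stop()
  }
}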
diff --git a/extras/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisStreamSuite.scala b/extras/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisStreamSuite.scala
index 4992b04176..f9c952b946 100644
--- a/extras/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisStreamSuite.scala
+++ b/extras/kinesis-asl/src/test/scala/org/apache/spark/streaming/kinesis/KinesisStreamSuite.scala
@@ -59,7 +59,7 @@ class KinesisStreamSuite extends KinesisFunSuite
}
}
- ignore("KinesisUtils API") {
+ test("KinesisUtils API") {
ssc = new StreamingContext(sc, Seconds(1))
// Tests the API, does not actually test data receiving
val kinesisStream1 = KinesisUtils.createStream(ssc, "mySparkStream",
@@ -83,7 +83,7 @@ class KinesisStreamSuite extends KinesisFunSuite
* you must have AWS credentials available through the default AWS provider chain,
* and you have to set the system environment variable RUN_KINESIS_TESTS=1 .
*/
- ignore("basic operation") {
+ testIfEnabled("basic operation") {
val kinesisTestUtils = new KinesisTestUtils()
try {
kinesisTestUtils.createStream()
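
The testIfEnabled helper used above comes from KinesisFunSuite, whose definition is not part of this diff. A minimal sketch of such an environment-gated helper, assuming it keys off the RUN_KINESIS_TESTS variable mentioned in the comment above (the real variable name and gating logic may differ), could look like:

package org.apache.spark.streaming.kinesis

import org.apache.spark.SparkFunSuite

// Hypothetical sketch only; the actual KinesisFunSuite implementation is
// not shown in this diff and may differ.
trait KinesisFunSuite extends SparkFunSuite {

  // Assume the gate is the RUN_KINESIS_TESTS environment variable.
  private val kinesisTestsEnabled =
    sys.env.getOrElse("RUN_KINESIS_TESTS", "0") == "1"

  /** Register the test only when Kinesis tests are enabled; otherwise ignore it. */
  def testIfEnabled(name: String)(body: => Unit): Unit = {
    if (kinesisTestsEnabled) {
      test(name)(body)
    } else {
      ignore(s"$name [enable by setting RUN_KINESIS_TESTS=1]")(body)
    }
  }
}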