path: root/dev/audit-release/sbt_app_streaming/src/main/scala/StreamingApp.scala
author    Reynold Xin <rxin@databricks.com>  2016-07-25 20:03:54 +0100
committer Sean Owen <sowen@cloudera.com>  2016-07-25 20:03:54 +0100
commit    dd784a8822497ad0631208d56325c4d74ab9e036 (patch)
tree      54fe835421d9c95ef5041aa9d2b7723e3fed3672 /dev/audit-release/sbt_app_streaming/src/main/scala/StreamingApp.scala
parent    ad3708e78377d631e3d586548c961f4748322bf0 (diff)
download  spark-dd784a8822497ad0631208d56325c4d74ab9e036.tar.gz
          spark-dd784a8822497ad0631208d56325c4d74ab9e036.tar.bz2
          spark-dd784a8822497ad0631208d56325c4d74ab9e036.zip
[SPARK-16685] Remove audit-release scripts.
## What changes were proposed in this pull request?

This patch removes dev/audit-release. It was initially created to do basic release auditing, but the scripts have been unused for more than a year.

## How was this patch tested?

N/A

Author: Reynold Xin <rxin@databricks.com>

Closes #14342 from rxin/SPARK-16685.
Diffstat (limited to 'dev/audit-release/sbt_app_streaming/src/main/scala/StreamingApp.scala')
-rw-r--r--  dev/audit-release/sbt_app_streaming/src/main/scala/StreamingApp.scala | 65
1 file changed, 0 insertions(+), 65 deletions(-)
diff --git a/dev/audit-release/sbt_app_streaming/src/main/scala/StreamingApp.scala b/dev/audit-release/sbt_app_streaming/src/main/scala/StreamingApp.scala
deleted file mode 100644
index d6a074687f..0000000000
--- a/dev/audit-release/sbt_app_streaming/src/main/scala/StreamingApp.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// scalastyle:off println
-package main.scala
-
-import scala.collection.mutable.{ListBuffer, Queue}
-
-import org.apache.spark.SparkConf
-import org.apache.spark.rdd.RDD
-import org.apache.spark.streaming.StreamingContext
-import org.apache.spark.streaming._
-
-object SparkStreamingExample {
-
- def main(args: Array[String]) {
- val conf = sys.env.get("SPARK_AUDIT_MASTER") match {
- case Some(master) => new SparkConf().setAppName("Simple Streaming App").setMaster(master)
- case None => new SparkConf().setAppName("Simple Streaming App")
- }
- val ssc = new StreamingContext(conf, Seconds(1))
- val seen = ListBuffer[RDD[Int]]()
-
- val rdd1 = ssc.sparkContext.makeRDD(1 to 100, 10)
- val rdd2 = ssc.sparkContext.makeRDD(1 to 1000, 10)
- val rdd3 = ssc.sparkContext.makeRDD(1 to 10000, 10)
-
- val queue = Queue(rdd1, rdd2, rdd3)
- val stream = ssc.queueStream(queue)
-
- stream.foreachRDD(rdd => seen += rdd)
- ssc.start()
- Thread.sleep(5000)
-
- def test(f: => Boolean, failureMsg: String) = {
- if (!f) {
- println(failureMsg)
- System.exit(-1)
- }
- }
-
- val rddCounts = seen.map(rdd => rdd.count()).filter(_ > 0)
- test(rddCounts.length == 3, "Did not collect three RDD's from stream")
- test(rddCounts.toSet == Set(100, 1000, 10000), "Did not find expected streams")
-
- println("Test succeeded")
-
- ssc.stop()
- }
-}
-// scalastyle:on println
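
For context, the deleted StreamingApp above was a small queueStream smoke test: it fed a few RDDs through a one-second-batch stream and checked that each batch arrived with the expected count. Below is a minimal, self-contained sketch of the same kind of check, assuming Spark 2.x Streaming APIs on the classpath; the object name QueueStreamCheck and the local[2] master are illustrative choices, not part of this commit.

import scala.collection.mutable.{ListBuffer, Queue}

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Seconds, StreamingContext}

object QueueStreamCheck {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("QueueStreamCheck").setMaster("local[2]")
    val ssc = new StreamingContext(conf, Seconds(1))

    // Queue a few RDDs; with the default oneAtATime behavior, each
    // one-second batch of the stream drains one RDD from the queue.
    val queue = Queue(
      ssc.sparkContext.makeRDD(1 to 100, 2),
      ssc.sparkContext.makeRDD(1 to 1000, 2))
    val seen = ListBuffer[RDD[Int]]()

    // foreachRDD runs on the driver, so collecting into a driver-side
    // buffer (guarded by synchronized) is safe here.
    ssc.queueStream(queue).foreachRDD(rdd => seen.synchronized { seen += rdd })

    ssc.start()
    Thread.sleep(5000) // let a few batches run

    // Count while the SparkContext is still alive, then shut down.
    val counts = seen.synchronized(seen.map(_.count()).filter(_ > 0)).toSet
    ssc.stop()

    assert(counts == Set(100L, 1000L), s"unexpected batch sizes: $counts")
    println("queueStream check passed")
  }
}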