diff options
author | Reynold Xin <rxin@databricks.com> | 2016-07-25 20:03:54 +0100 |
---|---|---|
committer | Sean Owen <sowen@cloudera.com> | 2016-07-25 20:03:54 +0100 |
commit | dd784a8822497ad0631208d56325c4d74ab9e036 (patch) | |
tree | 54fe835421d9c95ef5041aa9d2b7723e3fed3672 /dev/audit-release/sbt_app_core | |
parent | ad3708e78377d631e3d586548c961f4748322bf0 (diff) | |
download | spark-dd784a8822497ad0631208d56325c4d74ab9e036.tar.gz spark-dd784a8822497ad0631208d56325c4d74ab9e036.tar.bz2 spark-dd784a8822497ad0631208d56325c4d74ab9e036.zip |
[SPARK-16685] Remove audit-release scripts.
## What changes were proposed in this pull request?
This patch removes dev/audit-release. It was initially created to do basic release auditing. It has been unused for the last year or more.
## How was this patch tested?
N/A
Author: Reynold Xin <rxin@databricks.com>
Closes #14342 from rxin/SPARK-16685.
Diffstat (limited to 'dev/audit-release/sbt_app_core')
-rw-r--r-- | dev/audit-release/sbt_app_core/build.sbt | 28 | ||||
-rw-r--r-- | dev/audit-release/sbt_app_core/input.txt | 8 | ||||
-rw-r--r-- | dev/audit-release/sbt_app_core/src/main/scala/SparkApp.scala | 63 |
3 files changed, 0 insertions, 99 deletions
diff --git a/dev/audit-release/sbt_app_core/build.sbt b/dev/audit-release/sbt_app_core/build.sbt deleted file mode 100644 index 291b1d6440..0000000000 --- a/dev/audit-release/sbt_app_core/build.sbt +++ /dev/null @@ -1,28 +0,0 @@ -// -// Licensed to the Apache Software Foundation (ASF) under one or more -// contributor license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright ownership. -// The ASF licenses this file to You under the Apache License, Version 2.0 -// (the "License"); you may not use this file except in compliance with -// the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -name := "Simple Project" - -version := "1.0" - -scalaVersion := System.getenv.get("SCALA_VERSION") - -libraryDependencies += "org.apache.spark" %% "spark-core" % System.getenv.get("SPARK_VERSION") - -resolvers ++= Seq( - "Spark Release Repository" at System.getenv.get("SPARK_RELEASE_REPOSITORY"), - "Spray Repository" at "http://repo.spray.cc/") diff --git a/dev/audit-release/sbt_app_core/input.txt b/dev/audit-release/sbt_app_core/input.txt deleted file mode 100644 index 837b6f85ae..0000000000 --- a/dev/audit-release/sbt_app_core/input.txt +++ /dev/null @@ -1,8 +0,0 @@ -a -b -c -d -a -b -c -d diff --git a/dev/audit-release/sbt_app_core/src/main/scala/SparkApp.scala b/dev/audit-release/sbt_app_core/src/main/scala/SparkApp.scala deleted file mode 100644 index 61d91c70e9..0000000000 --- a/dev/audit-release/sbt_app_core/src/main/scala/SparkApp.scala +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -// scalastyle:off println -package main.scala - -import scala.util.Try - -import org.apache.spark.SparkConf -import org.apache.spark.SparkContext -import org.apache.spark.SparkContext._ - -object SimpleApp { - def main(args: Array[String]) { - val conf = sys.env.get("SPARK_AUDIT_MASTER") match { - case Some(master) => new SparkConf().setAppName("Simple Spark App").setMaster(master) - case None => new SparkConf().setAppName("Simple Spark App") - } - val logFile = "input.txt" - val sc = new SparkContext(conf) - val logData = sc.textFile(logFile, 2).cache() - val numAs = logData.filter(line => line.contains("a")).count() - val numBs = logData.filter(line => line.contains("b")).count() - if (numAs != 2 || numBs != 2) { - println("Failed to parse log files with Spark") - System.exit(-1) - } - - // Regression test for SPARK-1167: Remove metrics-ganglia from default build due to LGPL issue - val foundConsole = Try(Class.forName("org.apache.spark.metrics.sink.ConsoleSink")).isSuccess - val foundGanglia = Try(Class.forName("org.apache.spark.metrics.sink.GangliaSink")).isSuccess - if (!foundConsole) { - println("Console sink not loaded via spark-core") - System.exit(-1) - } - if (foundGanglia) { - println("Ganglia sink was loaded via spark-core") - System.exit(-1) - } - - // Remove kinesis from default build due to ASL license issue - val foundKinesis = Try(Class.forName("org.apache.spark.streaming.kinesis.KinesisUtils")).isSuccess - if (foundKinesis) { - println("Kinesis was loaded via spark-core") - System.exit(-1) - } - } -} -// scalastyle:on println |