author    Shixiong Zhu <shixiong@databricks.com>    2016-03-14 16:56:04 -0700
committer Reynold Xin <rxin@databricks.com>         2016-03-14 16:56:04 -0700
commit    06dec37455c3f800897defee6fad0da623f26050 (patch)
tree      d49dd098587f8a3c7a019b0aad605327da6fcecd /dev
parent    8301fadd8d269da11e72870b7a889596e3337839 (diff)
download  spark-06dec37455c3f800897defee6fad0da623f26050.tar.gz
          spark-06dec37455c3f800897defee6fad0da623f26050.tar.bz2
          spark-06dec37455c3f800897defee6fad0da623f26050.zip
[SPARK-13843][STREAMING] Move streaming-flume, streaming-mqtt, streaming-zeromq, streaming-akka, streaming-twitter to Spark packages
## What changes were proposed in this pull request?

Spark currently carries a few sub-projects, each integrating Streaming with a different external source. Now that we have a better way to include external libraries (Spark packages), and with Spark 2.0 coming up, we can move the following projects out of Spark to https://github.com/spark-packages:

- streaming-flume
- streaming-akka
- streaming-mqtt
- streaming-zeromq
- streaming-twitter

These are ancillary packages, and given the overhead of maintenance, running tests, and PR failures, it is better to maintain them outside of Spark. In addition, they can follow their own release cycles, so we can release them faster. I have already copied these projects to https://github.com/spark-packages.

## How was this patch tested?

Jenkins tests.

Author: Shixiong Zhu <shixiong@databricks.com>

Closes #11672 from zsxwing/remove-external-pkg.
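For readers unfamiliar with Spark packages, the sketch below shows roughly how an application would pull one of the externalized connectors back in after this change. It is a minimal sketch, not part of this patch: the Maven coordinates are placeholders, and the real coordinates are whatever the projects under https://github.com/spark-packages eventually publish.

```python
# Hedged sketch only: the coordinates below are placeholders, not published artifacts.
from pyspark import SparkConf, SparkContext

conf = (SparkConf()
        .setAppName("external-connector-example")
        # spark.jars.packages is the configuration equivalent of spark-submit's
        # --packages flag: Maven coordinates are resolved and added to the classpath.
        .set("spark.jars.packages",
             "org.example:spark-streaming-flume_2.11:2.0.0"))  # placeholder coordinates

sc = SparkContext(conf=conf)
```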
Diffstat (limited to 'dev')
-rwxr-xr-x  dev/audit-release/audit_release.py    3
-rwxr-xr-x  dev/run-tests.py                      3
-rw-r--r--  dev/sparktestsupport/modules.py      84
3 files changed, 1 insertion, 89 deletions
diff --git a/dev/audit-release/audit_release.py b/dev/audit-release/audit_release.py
index 4dabb51254..426b3117f1 100755
--- a/dev/audit-release/audit_release.py
+++ b/dev/audit-release/audit_release.py
@@ -116,8 +116,7 @@ original_dir = os.getcwd()
# dependencies within those projects.
modules = [
"spark-core", "spark-mllib", "spark-streaming", "spark-repl",
- "spark-graphx", "spark-streaming-flume", "spark-streaming-kafka",
- "spark-streaming-mqtt", "spark-streaming-twitter", "spark-streaming-zeromq",
+ "spark-graphx", "spark-streaming-kafka",
"spark-catalyst", "spark-sql", "spark-hive", "spark-streaming-kinesis-asl"
]
modules = map(lambda m: "%s_%s" % (m, SCALA_BINARY_VERSION), modules)
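The line kept at the end of this hunk appends the Scala binary version to each module name to form the artifact ids that the audit script checks. A minimal standalone sketch of that expansion, assuming SCALA_BINARY_VERSION is "2.11":

```python
# Standalone sketch of the artifact-name expansion in audit_release.py,
# with SCALA_BINARY_VERSION hard-coded for illustration.
SCALA_BINARY_VERSION = "2.11"

modules = [
    "spark-core", "spark-mllib", "spark-streaming", "spark-repl",
    "spark-graphx", "spark-streaming-kafka",
    "spark-catalyst", "spark-sql", "spark-hive", "spark-streaming-kinesis-asl",
]

# Each module name becomes a Scala-versioned artifact id, e.g.
# "spark-streaming-kafka" -> "spark-streaming-kafka_2.11"; the removed
# flume/mqtt/twitter/zeromq entries therefore drop out of the audited list.
artifacts = ["%s_%s" % (m, SCALA_BINARY_VERSION) for m in modules]
print(artifacts)
```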
diff --git a/dev/run-tests.py b/dev/run-tests.py
index a1e6f1bdb5..d940cdad3e 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -337,9 +337,6 @@ def build_spark_sbt(hadoop_version):
build_profiles = get_hadoop_profiles(hadoop_version) + modules.root.build_profile_flags
sbt_goals = ["package",
"streaming-kafka-assembly/assembly",
- "streaming-flume-assembly/assembly",
- "streaming-mqtt-assembly/assembly",
- "streaming-mqtt/test:assembly",
"streaming-kinesis-asl-assembly/assembly"]
profiles_and_goals = build_profiles + sbt_goals
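For context, the goal list above is concatenated with the Hadoop build profiles and handed to sbt. A rough sketch of that composition follows; the profile flags are assumed for illustration and are not the script's real tables.

```python
# Rough sketch of how run-tests.py assembles the sbt invocation after this change;
# the profile flags returned here are assumptions, not the script's actual values.
def get_hadoop_profiles(hadoop_version):
    # placeholder mapping for illustration only
    return {"hadoop2.6": ["-Pyarn", "-Phadoop-2.6"]}.get(hadoop_version, [])

build_profiles = get_hadoop_profiles("hadoop2.6") + ["-Phive"]  # assumed root build profile flag
sbt_goals = ["package",
             "streaming-kafka-assembly/assembly",
             "streaming-kinesis-asl-assembly/assembly"]

# With the flume and mqtt assembly goals removed, only the two remaining
# assemblies are built alongside "package".
profiles_and_goals = build_profiles + sbt_goals
print(" ".join(["build/sbt"] + profiles_and_goals))
```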
diff --git a/dev/sparktestsupport/modules.py b/dev/sparktestsupport/modules.py
index 1781de4c65..d1184886e2 100644
--- a/dev/sparktestsupport/modules.py
+++ b/dev/sparktestsupport/modules.py
@@ -210,43 +210,6 @@ streaming_kinesis_asl = Module(
)
-streaming_zeromq = Module(
- name="streaming-zeromq",
- dependencies=[streaming],
- source_file_regexes=[
- "external/zeromq",
- ],
- sbt_test_goals=[
- "streaming-zeromq/test",
- ]
-)
-
-
-streaming_twitter = Module(
- name="streaming-twitter",
- dependencies=[streaming],
- source_file_regexes=[
- "external/twitter",
- ],
- sbt_test_goals=[
- "streaming-twitter/test",
- ]
-)
-
-
-streaming_mqtt = Module(
- name="streaming-mqtt",
- dependencies=[streaming],
- source_file_regexes=[
- "external/mqtt",
- "external/mqtt-assembly",
- ],
- sbt_test_goals=[
- "streaming-mqtt/test",
- ]
-)
-
-
streaming_kafka = Module(
name="streaming-kafka",
dependencies=[streaming],
@@ -260,51 +223,6 @@ streaming_kafka = Module(
)
-streaming_flume_sink = Module(
- name="streaming-flume-sink",
- dependencies=[streaming],
- source_file_regexes=[
- "external/flume-sink",
- ],
- sbt_test_goals=[
- "streaming-flume-sink/test",
- ]
-)
-
-
-streaming_akka = Module(
- name="streaming-akka",
- dependencies=[streaming],
- source_file_regexes=[
- "external/akka",
- ],
- sbt_test_goals=[
- "streaming-akka/test",
- ]
-)
-
-
-streaming_flume = Module(
- name="streaming-flume",
- dependencies=[streaming],
- source_file_regexes=[
- "external/flume",
- ],
- sbt_test_goals=[
- "streaming-flume/test",
- ]
-)
-
-
-streaming_flume_assembly = Module(
- name="streaming-flume-assembly",
- dependencies=[streaming_flume, streaming_flume_sink],
- source_file_regexes=[
- "external/flume-assembly",
- ]
-)
-
-
mllib = Module(
name="mllib",
dependencies=[streaming, sql],
@@ -376,8 +294,6 @@ pyspark_streaming = Module(
pyspark_core,
streaming,
streaming_kafka,
- streaming_flume_assembly,
- streaming_mqtt,
streaming_kinesis_asl
],
source_file_regexes=[