author     Tathagata Das <tathagata.das1565@gmail.com>  2014-01-15 12:15:46 -0800
committer  Tathagata Das <tathagata.das1565@gmail.com>  2014-01-15 12:15:46 -0800
commit     9e6375349e945fed87bbeae565f35ab9ca28f70f (patch)
tree       d6f41dc97121d6a99c4c057a55de687c6d11dcfe /streaming
parent     0e15bd7827d5acb5c1ccb071e358338817f95a79 (diff)
download   spark-9e6375349e945fed87bbeae565f35ab9ca28f70f.tar.gz
           spark-9e6375349e945fed87bbeae565f35ab9ca28f70f.tar.bz2
           spark-9e6375349e945fed87bbeae565f35ab9ca28f70f.zip
Made some classes private[streaming] and deprecated JavaStreamingContext.sc in favor of a new sparkContext field.
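
For callers of the Java API, moving off the deprecated sc field is a one-line change. The following is a minimal Scala sketch of the migration; the master, app name, batch interval, and object name are placeholders for illustration and are not part of this commit.

import java.util.Arrays
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.api.java.JavaStreamingContext

object SparkContextMigrationSketch {
  def main(args: Array[String]): Unit = {
    // Placeholder master, app name, and batch interval for illustration only.
    val jssc = new JavaStreamingContext("local[2]", "migration-example", Seconds(1))

    // Before this commit: jssc.sc exposed the underlying JavaSparkContext.
    // That field is now deprecated; use sparkContext instead.
    val rdd = jssc.sparkContext.parallelize(Arrays.asList(1, 2, 3))

    println(rdd.count())
    jssc.stop()
  }
}
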
Diffstat (limited to 'streaming')
-rw-r--r--  streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala | 6
-rw-r--r--  streaming/src/main/scala/org/apache/spark/streaming/util/RateLimitedOutputStream.scala  | 1
-rw-r--r--  streaming/src/main/scala/org/apache/spark/streaming/util/RawTextHelper.scala            | 1
-rw-r--r--  streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala            | 1
-rw-r--r--  streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java                    | 6
5 files changed, 11 insertions(+), 4 deletions(-)
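
The visibility changes below use Scala's package-qualified private modifier: a private[streaming] definition remains callable from anywhere under org.apache.spark.streaming but disappears from the public API. A small illustrative sketch of the pattern (the object name is hypothetical, not from this commit):

package org.apache.spark.streaming.util

// private[streaming] restricts access to the enclosing
// org.apache.spark.streaming package and its sub-packages,
// the same pattern applied to RateLimitedOutputStream,
// RawTextHelper, and RawTextSender in the diff below.
private[streaming] object VisibilityExample {
  def onlyForStreamingInternals(): String = "not part of the public API"
}
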
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
index 4edf8fa13a..613683ca40 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
@@ -141,8 +141,12 @@ class JavaStreamingContext(val ssc: StreamingContext) {
*/
def this(path: String, hadoopConf: Configuration) = this(new StreamingContext(path, hadoopConf))
+
+ @deprecated("use sparkContext", "0.9.0")
+ val sc: JavaSparkContext = sparkContext
+
/** The underlying SparkContext */
- val sc: JavaSparkContext = new JavaSparkContext(ssc.sc)
+ val sparkContext = new JavaSparkContext(ssc.sc)
/**
* Create a input stream from network source hostname:port. Data is received using
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/util/RateLimitedOutputStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/RateLimitedOutputStream.scala
index b9c0596378..179fd75939 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/util/RateLimitedOutputStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/RateLimitedOutputStream.scala
@@ -22,6 +22,7 @@ import scala.annotation.tailrec
import java.io.OutputStream
import java.util.concurrent.TimeUnit._
+private[streaming]
class RateLimitedOutputStream(out: OutputStream, bytesPerSec: Int) extends OutputStream {
val SYNC_INTERVAL = NANOSECONDS.convert(10, SECONDS)
val CHUNK_SIZE = 8192
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextHelper.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextHelper.scala
index 5b6c048a39..07021ebb58 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextHelper.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextHelper.scala
@@ -22,6 +22,7 @@ import org.apache.spark.SparkContext._
import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap}
import scala.collection.JavaConversions.mapAsScalaMap
+private[streaming]
object RawTextHelper {
/**
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala
index 463617a713..684b38e8b3 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala
@@ -33,6 +33,7 @@ import org.apache.spark.util.IntParam
* A helper program that sends blocks of Kryo-serialized text strings out on a socket at a
* specified rate. Used to feed data into RawInputDStream.
*/
+private[streaming]
object RawTextSender extends Logging {
def main(args: Array[String]) {
if (args.length != 4) {
diff --git a/streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java b/streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java
index 8b7d7709bf..4fbbce9b8b 100644
--- a/streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java
+++ b/streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java
@@ -297,9 +297,9 @@ public class JavaAPISuite extends LocalJavaStreamingContext implements Serializa
Arrays.asList(7,8,9));
JavaSparkContext jsc = new JavaSparkContext(ssc.ssc().sc());
- JavaRDD<Integer> rdd1 = ssc.sc().parallelize(Arrays.asList(1, 2, 3));
- JavaRDD<Integer> rdd2 = ssc.sc().parallelize(Arrays.asList(4, 5, 6));
- JavaRDD<Integer> rdd3 = ssc.sc().parallelize(Arrays.asList(7,8,9));
+ JavaRDD<Integer> rdd1 = ssc.sparkContext().parallelize(Arrays.asList(1, 2, 3));
+ JavaRDD<Integer> rdd2 = ssc.sparkContext().parallelize(Arrays.asList(4, 5, 6));
+ JavaRDD<Integer> rdd3 = ssc.sparkContext().parallelize(Arrays.asList(7,8,9));
LinkedList<JavaRDD<Integer>> rdds = Lists.newLinkedList();
rdds.add(rdd1);