about | summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
authorPunya Biswal <pbiswal@palantir.com>2014-02-22 17:53:48 -0800
committerAaron Davidson <aaron@databricks.com>2014-02-22 17:53:48 -0800
commit29ac7ea52fbb0c6531e14305e2fb1ccba9678f7e (patch)
tree01349562ee684de12d8b0207a4548a8ddd30d385
parent1aa4f8af7220bc311196ef0eef0a4814cd2757d3 (diff)
downloadspark-29ac7ea52fbb0c6531e14305e2fb1ccba9678f7e.tar.gz
spark-29ac7ea52fbb0c6531e14305e2fb1ccba9678f7e.tar.bz2
spark-29ac7ea52fbb0c6531e14305e2fb1ccba9678f7e.zip
Migrate Java code to Scala or move it to src/main/java
These classes can't be migrated:
- StorageLevels: impossible to create static fields in Scala
- JavaSparkContextVarargsWorkaround: incompatible varargs
- JavaAPISuite: should test Java APIs in pure Java (for sanity)

Author: Punya Biswal <pbiswal@palantir.com>

Closes #605 from punya/move-java-sources and squashes the following commits:

25b00b2 [Punya Biswal] Remove redundant type param; reformat
853da46 [Punya Biswal] Use factory method rather than constructor
e5d53d9 [Punya Biswal] Migrate Java code to Scala or move it to src/main/java
-rw-r--r--core/src/main/java/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java (renamed from core/src/main/scala/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java)0
-rw-r--r--core/src/main/java/org/apache/spark/api/java/StorageLevels.java (renamed from core/src/main/scala/org/apache/spark/api/java/StorageLevels.java)22
-rw-r--r--core/src/main/scala/org/apache/spark/SparkFiles.scala (renamed from core/src/main/scala/org/apache/spark/SparkFiles.java)19
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.scala (renamed from core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java)10
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.scala (renamed from core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java)7
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/function/Function.scala (renamed from core/src/main/scala/org/apache/spark/api/java/function/Function.java)14
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/function/Function2.scala (renamed from core/src/main/scala/org/apache/spark/api/java/function/Function2.java)16
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/function/Function3.scala (renamed from core/src/main/scala/org/apache/spark/api/java/function/Function3.java)17
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.scala (renamed from core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java)23
-rw-r--r--core/src/main/scala/org/apache/spark/api/java/function/PairFunction.scala (renamed from core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java)20
-rw-r--r--core/src/test/java/org/apache/spark/JavaAPISuite.java (renamed from core/src/test/scala/org/apache/spark/JavaAPISuite.java)0
11 files changed, 56 insertions, 92 deletions
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java b/core/src/main/java/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
index 2090efd3b9..2090efd3b9 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
+++ b/core/src/main/java/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
diff --git a/core/src/main/scala/org/apache/spark/api/java/StorageLevels.java b/core/src/main/java/org/apache/spark/api/java/StorageLevels.java
index 0744269773..9f13b39909 100644
--- a/core/src/main/scala/org/apache/spark/api/java/StorageLevels.java
+++ b/core/src/main/java/org/apache/spark/api/java/StorageLevels.java
@@ -23,17 +23,17 @@ import org.apache.spark.storage.StorageLevel;
* Expose some commonly useful storage level constants.
*/
public class StorageLevels {
- public static final StorageLevel NONE = new StorageLevel(false, false, false, 1);
- public static final StorageLevel DISK_ONLY = new StorageLevel(true, false, false, 1);
- public static final StorageLevel DISK_ONLY_2 = new StorageLevel(true, false, false, 2);
- public static final StorageLevel MEMORY_ONLY = new StorageLevel(false, true, true, 1);
- public static final StorageLevel MEMORY_ONLY_2 = new StorageLevel(false, true, true, 2);
- public static final StorageLevel MEMORY_ONLY_SER = new StorageLevel(false, true, false, 1);
- public static final StorageLevel MEMORY_ONLY_SER_2 = new StorageLevel(false, true, false, 2);
- public static final StorageLevel MEMORY_AND_DISK = new StorageLevel(true, true, true, 1);
- public static final StorageLevel MEMORY_AND_DISK_2 = new StorageLevel(true, true, true, 2);
- public static final StorageLevel MEMORY_AND_DISK_SER = new StorageLevel(true, true, false, 1);
- public static final StorageLevel MEMORY_AND_DISK_SER_2 = new StorageLevel(true, true, false, 2);
+ public static final StorageLevel NONE = create(false, false, false, 1);
+ public static final StorageLevel DISK_ONLY = create(true, false, false, 1);
+ public static final StorageLevel DISK_ONLY_2 = create(true, false, false, 2);
+ public static final StorageLevel MEMORY_ONLY = create(false, true, true, 1);
+ public static final StorageLevel MEMORY_ONLY_2 = create(false, true, true, 2);
+ public static final StorageLevel MEMORY_ONLY_SER = create(false, true, false, 1);
+ public static final StorageLevel MEMORY_ONLY_SER_2 = create(false, true, false, 2);
+ public static final StorageLevel MEMORY_AND_DISK = create(true, true, true, 1);
+ public static final StorageLevel MEMORY_AND_DISK_2 = create(true, true, true, 2);
+ public static final StorageLevel MEMORY_AND_DISK_SER = create(true, true, false, 1);
+ public static final StorageLevel MEMORY_AND_DISK_SER_2 = create(true, true, false, 2);
/**
* Create a new StorageLevel object.
diff --git a/core/src/main/scala/org/apache/spark/SparkFiles.java b/core/src/main/scala/org/apache/spark/SparkFiles.scala
index af9cf85e37..e85b89fd01 100644
--- a/core/src/main/scala/org/apache/spark/SparkFiles.java
+++ b/core/src/main/scala/org/apache/spark/SparkFiles.scala
@@ -15,28 +15,25 @@
* limitations under the License.
*/
-package org.apache.spark;
+package org.apache.spark
-import java.io.File;
+import java.io.File
/**
* Resolves paths to files added through `SparkContext.addFile()`.
*/
-public class SparkFiles {
-
- private SparkFiles() {}
+object SparkFiles {
/**
* Get the absolute path of a file added through `SparkContext.addFile()`.
*/
- public static String get(String filename) {
- return new File(getRootDirectory(), filename).getAbsolutePath();
- }
+ def get(filename: String): String =
+ new File(getRootDirectory(), filename).getAbsolutePath()
/**
* Get the root directory that contains files added through `SparkContext.addFile()`.
*/
- public static String getRootDirectory() {
- return SparkEnv.get().sparkFilesDir();
- }
+ def getRootDirectory(): String =
+ SparkEnv.get.sparkFilesDir
+
}
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.scala
index 30e6a52474..7500a89436 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.scala
@@ -15,16 +15,16 @@
* limitations under the License.
*/
-package org.apache.spark.api.java.function;
+package org.apache.spark.api.java.function
-import java.io.Serializable;
+import java.lang.{Double => JDouble, Iterable => JIterable}
/**
* A function that returns zero or more records of type Double from each input record.
*/
// DoubleFlatMapFunction does not extend FlatMapFunction because flatMap is
// overloaded for both FlatMapFunction and DoubleFlatMapFunction.
-public abstract class DoubleFlatMapFunction<T> extends WrappedFunction1<T, Iterable<Double>>
- implements Serializable {
- // Intentionally left blank
+abstract class DoubleFlatMapFunction[T] extends WrappedFunction1[T, JIterable[JDouble]]
+ with Serializable {
+ // Intentionally left blank
}
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.scala
index 490da255bc..2cdf2e92c3 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.scala
@@ -15,16 +15,15 @@
* limitations under the License.
*/
-package org.apache.spark.api.java.function;
+package org.apache.spark.api.java.function
-import java.io.Serializable;
+import java.lang.{Double => JDouble}
/**
* A function that returns Doubles, and can be used to construct DoubleRDDs.
*/
// DoubleFunction does not extend Function because some UDF functions, like map,
// are overloaded for both Function and DoubleFunction.
-public abstract class DoubleFunction<T> extends WrappedFunction1<T, Double>
- implements Serializable {
+abstract class DoubleFunction[T] extends WrappedFunction1[T, JDouble] with Serializable {
// Intentionally left blank
}
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function.java b/core/src/main/scala/org/apache/spark/api/java/function/Function.scala
index e0fcd460c8..a5e1701f77 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/Function.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function.scala
@@ -15,21 +15,17 @@
* limitations under the License.
*/
-package org.apache.spark.api.java.function;
+package org.apache.spark.api.java.function
-import java.io.Serializable;
-
-import scala.reflect.ClassTag;
-import scala.reflect.ClassTag$;
+import scala.reflect.ClassTag
+import org.apache.spark.api.java.JavaSparkContext
/**
* Base class for functions whose return types do not create special RDDs. PairFunction and
* DoubleFunction are handled separately, to allow PairRDDs and DoubleRDDs to be constructed
* when mapping RDDs of other types.
*/
-public abstract class Function<T, R> extends WrappedFunction1<T, R> implements Serializable {
- public ClassTag<R> returnType() {
- return ClassTag$.MODULE$.apply(Object.class);
- }
+abstract class Function[T, R] extends WrappedFunction1[T, R] with Serializable {
+ def returnType(): ClassTag[R] = JavaSparkContext.fakeClassTag
}
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function2.java b/core/src/main/scala/org/apache/spark/api/java/function/Function2.scala
index 16d7379462..fa3616cbcb 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function2.scala
@@ -15,21 +15,15 @@
* limitations under the License.
*/
-package org.apache.spark.api.java.function;
+package org.apache.spark.api.java.function
-import java.io.Serializable;
-
-import scala.reflect.ClassTag;
-import scala.reflect.ClassTag$;
+import scala.reflect.ClassTag
+import org.apache.spark.api.java.JavaSparkContext
/**
* A two-argument function that takes arguments of type T1 and T2 and returns an R.
*/
-public abstract class Function2<T1, T2, R> extends WrappedFunction2<T1, T2, R>
- implements Serializable {
-
- public ClassTag<R> returnType() {
- return (ClassTag<R>) ClassTag$.MODULE$.apply(Object.class);
- }
+abstract class Function2[T1, T2, R] extends WrappedFunction2[T1, T2, R] with Serializable {
+ def returnType(): ClassTag[R] = JavaSparkContext.fakeClassTag
}
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function3.java b/core/src/main/scala/org/apache/spark/api/java/function/Function3.scala
index 096eb71f95..45152891e9 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/Function3.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function3.scala
@@ -15,21 +15,14 @@
* limitations under the License.
*/
-package org.apache.spark.api.java.function;
+package org.apache.spark.api.java.function
-import java.io.Serializable;
-
-import scala.reflect.ClassTag;
-import scala.reflect.ClassTag$;
+import org.apache.spark.api.java.JavaSparkContext
+import scala.reflect.ClassTag
/**
* A three-argument function that takes arguments of type T1, T2 and T3 and returns an R.
*/
-public abstract class Function3<T1, T2, T3, R> extends WrappedFunction3<T1, T2, T3, R>
- implements Serializable {
-
- public ClassTag<R> returnType() {
- return (ClassTag<R>) ClassTag$.MODULE$.apply(Object.class);
- }
+abstract class Function3[T1, T2, T3, R] extends WrappedFunction3[T1, T2, T3, R] with Serializable {
+ def returnType(): ClassTag[R] = JavaSparkContext.fakeClassTag
}
-
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.scala
index c72b98c28a..8467bbb892 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.scala
@@ -15,13 +15,11 @@
* limitations under the License.
*/
-package org.apache.spark.api.java.function;
+package org.apache.spark.api.java.function
-import java.io.Serializable;
-
-import scala.Tuple2;
-import scala.reflect.ClassTag;
-import scala.reflect.ClassTag$;
+import java.lang.{Iterable => JIterable}
+import org.apache.spark.api.java.JavaSparkContext
+import scala.reflect.ClassTag
/**
* A function that returns zero or more key-value pair records from each input record. The
@@ -29,15 +27,10 @@ import scala.reflect.ClassTag$;
*/
// PairFlatMapFunction does not extend FlatMapFunction because flatMap is
// overloaded for both FlatMapFunction and PairFlatMapFunction.
-public abstract class PairFlatMapFunction<T, K, V>
- extends WrappedFunction1<T, Iterable<Tuple2<K, V>>>
- implements Serializable {
+abstract class PairFlatMapFunction[T, K, V] extends WrappedFunction1[T, JIterable[(K, V)]]
+ with Serializable {
- public ClassTag<K> keyType() {
- return (ClassTag<K>) ClassTag$.MODULE$.apply(Object.class);
- }
+ def keyType(): ClassTag[K] = JavaSparkContext.fakeClassTag
- public ClassTag<V> valueType() {
- return (ClassTag<V>) ClassTag$.MODULE$.apply(Object.class);
- }
+ def valueType(): ClassTag[V] = JavaSparkContext.fakeClassTag
}
diff --git a/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.scala
index 84b9136d98..d0ba0b6307 100644
--- a/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.scala
@@ -15,27 +15,19 @@
* limitations under the License.
*/
-package org.apache.spark.api.java.function;
+package org.apache.spark.api.java.function
-import java.io.Serializable;
-
-import scala.Tuple2;
-import scala.reflect.ClassTag;
-import scala.reflect.ClassTag$;
+import scala.reflect.ClassTag
+import org.apache.spark.api.java.JavaSparkContext
/**
* A function that returns key-value pairs (Tuple2<K, V>), and can be used to construct PairRDDs.
*/
// PairFunction does not extend Function because some UDF functions, like map,
// are overloaded for both Function and PairFunction.
-public abstract class PairFunction<T, K, V> extends WrappedFunction1<T, Tuple2<K, V>>
- implements Serializable {
+abstract class PairFunction[T, K, V] extends WrappedFunction1[T, (K, V)] with Serializable {
- public ClassTag<K> keyType() {
- return (ClassTag<K>) ClassTag$.MODULE$.apply(Object.class);
- }
+ def keyType(): ClassTag[K] = JavaSparkContext.fakeClassTag
- public ClassTag<V> valueType() {
- return (ClassTag<V>) ClassTag$.MODULE$.apply(Object.class);
- }
+ def valueType(): ClassTag[V] = JavaSparkContext.fakeClassTag
}
diff --git a/core/src/test/scala/org/apache/spark/JavaAPISuite.java b/core/src/test/java/org/apache/spark/JavaAPISuite.java
index 20232e9fbb..20232e9fbb 100644
--- a/core/src/test/scala/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/java/org/apache/spark/JavaAPISuite.java