aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorImran Rashid <imran@quantifind.com>2012-07-12 12:42:10 -0700
committerMatei Zaharia <matei@eecs.berkeley.edu>2012-07-28 20:15:42 -0700
commitedc6972f8e14e78a243040f8c4e252884b63c55d (patch)
tree459685567f01ba8324e476a20e68141f064e1b60
parent83659af11c44779bf79db4ee8052522e1fe65c37 (diff)
downloadspark-edc6972f8e14e78a243040f8c4e252884b63c55d.tar.gz
spark-edc6972f8e14e78a243040f8c4e252884b63c55d.tar.bz2
spark-edc6972f8e14e78a243040f8c4e252884b63c55d.zip
move Vector class into core and spark.util package
-rw-r--r--core/src/main/scala/spark/util/Vector.scala (renamed from examples/src/main/scala/spark/examples/Vector.scala)9
-rw-r--r--examples/src/main/scala/spark/examples/LocalFileLR.scala2
-rw-r--r--examples/src/main/scala/spark/examples/LocalKMeans.scala3
-rw-r--r--examples/src/main/scala/spark/examples/LocalLR.scala2
-rw-r--r--examples/src/main/scala/spark/examples/SparkHdfsLR.scala2
-rw-r--r--examples/src/main/scala/spark/examples/SparkKMeans.scala2
-rw-r--r--examples/src/main/scala/spark/examples/SparkLR.scala2
7 files changed, 12 insertions, 10 deletions
diff --git a/examples/src/main/scala/spark/examples/Vector.scala b/core/src/main/scala/spark/util/Vector.scala
index 2abccbafce..e5604687e9 100644
--- a/examples/src/main/scala/spark/examples/Vector.scala
+++ b/core/src/main/scala/spark/util/Vector.scala
@@ -1,8 +1,8 @@
-package spark.examples
+package spark.util
class Vector(val elements: Array[Double]) extends Serializable {
def length = elements.length
-
+
def apply(index: Int) = elements(index)
def + (other: Vector): Vector = {
@@ -30,11 +30,12 @@ class Vector(val elements: Array[Double]) extends Serializable {
}
def * (scale: Double): Vector = Vector(length, i => this(i) * scale)
+ def * (scale: Double): Vector = Vector(length, i => this(i) * scale)
def / (d: Double): Vector = this * (1 / d)
def unary_- = this * -1
-
+
def sum = elements.reduceLeft(_ + _)
def squaredDist(other: Vector): Double = {
@@ -76,6 +77,8 @@ object Vector {
implicit object VectorAccumParam extends spark.AccumulatorParam[Vector] {
def addInPlace(t1: Vector, t2: Vector) = t1 + t2
+
def zero(initialValue: Vector) = Vector.zeros(initialValue.length)
}
+
}
diff --git a/examples/src/main/scala/spark/examples/LocalFileLR.scala b/examples/src/main/scala/spark/examples/LocalFileLR.scala
index b819fe80fe..f958ef9f72 100644
--- a/examples/src/main/scala/spark/examples/LocalFileLR.scala
+++ b/examples/src/main/scala/spark/examples/LocalFileLR.scala
@@ -1,7 +1,7 @@
package spark.examples
import java.util.Random
-import Vector._
+import spark.util.Vector
object LocalFileLR {
val D = 10 // Number of dimensions
diff --git a/examples/src/main/scala/spark/examples/LocalKMeans.scala b/examples/src/main/scala/spark/examples/LocalKMeans.scala
index 7e8e7a6959..b442c604cd 100644
--- a/examples/src/main/scala/spark/examples/LocalKMeans.scala
+++ b/examples/src/main/scala/spark/examples/LocalKMeans.scala
@@ -1,8 +1,7 @@
package spark.examples
import java.util.Random
-import Vector._
-import spark.SparkContext
+import spark.util.Vector
import spark.SparkContext._
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
diff --git a/examples/src/main/scala/spark/examples/LocalLR.scala b/examples/src/main/scala/spark/examples/LocalLR.scala
index 72c5009109..f2ac2b3e06 100644
--- a/examples/src/main/scala/spark/examples/LocalLR.scala
+++ b/examples/src/main/scala/spark/examples/LocalLR.scala
@@ -1,7 +1,7 @@
package spark.examples
import java.util.Random
-import Vector._
+import spark.util.Vector
object LocalLR {
val N = 10000 // Number of data points
diff --git a/examples/src/main/scala/spark/examples/SparkHdfsLR.scala b/examples/src/main/scala/spark/examples/SparkHdfsLR.scala
index 13b6ec1d3f..5b2bc84d69 100644
--- a/examples/src/main/scala/spark/examples/SparkHdfsLR.scala
+++ b/examples/src/main/scala/spark/examples/SparkHdfsLR.scala
@@ -2,7 +2,7 @@ package spark.examples
import java.util.Random
import scala.math.exp
-import Vector._
+import spark.util.Vector
import spark._
object SparkHdfsLR {
diff --git a/examples/src/main/scala/spark/examples/SparkKMeans.scala b/examples/src/main/scala/spark/examples/SparkKMeans.scala
index 5eb1c95a16..adce551322 100644
--- a/examples/src/main/scala/spark/examples/SparkKMeans.scala
+++ b/examples/src/main/scala/spark/examples/SparkKMeans.scala
@@ -1,8 +1,8 @@
package spark.examples
import java.util.Random
-import Vector._
import spark.SparkContext
+import spark.util.Vector
import spark.SparkContext._
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
diff --git a/examples/src/main/scala/spark/examples/SparkLR.scala b/examples/src/main/scala/spark/examples/SparkLR.scala
index 7715e5a713..19123db738 100644
--- a/examples/src/main/scala/spark/examples/SparkLR.scala
+++ b/examples/src/main/scala/spark/examples/SparkLR.scala
@@ -2,7 +2,7 @@ package spark.examples
import java.util.Random
import scala.math.exp
-import Vector._
+import spark.util.Vector
import spark._
object SparkLR {