author     Imran Rashid <imran@quantifind.com>      2012-07-12 12:42:10 -0700
committer  Matei Zaharia <matei@eecs.berkeley.edu>  2012-07-28 20:15:42 -0700
commit     edc6972f8e14e78a243040f8c4e252884b63c55d (patch)
tree       459685567f01ba8324e476a20e68141f064e1b60 /examples
parent     83659af11c44779bf79db4ee8052522e1fe65c37 (diff)
move Vector class into core and spark.util package
Diffstat (limited to 'examples')
-rw-r--r--  examples/src/main/scala/spark/examples/LocalFileLR.scala |  2
-rw-r--r--  examples/src/main/scala/spark/examples/LocalKMeans.scala |  3
-rw-r--r--  examples/src/main/scala/spark/examples/LocalLR.scala     |  2
-rw-r--r--  examples/src/main/scala/spark/examples/SparkHdfsLR.scala |  2
-rw-r--r--  examples/src/main/scala/spark/examples/SparkKMeans.scala |  2
-rw-r--r--  examples/src/main/scala/spark/examples/SparkLR.scala     |  2
-rw-r--r--  examples/src/main/scala/spark/examples/Vector.scala      | 81
7 files changed, 6 insertions(+), 88 deletions(-)
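
The change to each example program is import-only: code that previously pulled in the package-local class with "import Vector._" now imports the relocated spark.util.Vector instead. A minimal sketch of a caller after this commit, assuming the moved class keeps the companion-object constructors shown in the deleted Vector.scala at the bottom of this diff (the object name VectorImportSketch is illustrative, not part of the patch):

package spark.examples

import spark.util.Vector   // was: import Vector._

object VectorImportSketch {
  def main(args: Array[String]) {
    val w = Vector(1.0, 2.0, 3.0)        // companion apply(Double*)
    val x = Vector(3, i => i.toDouble)   // companion apply(length, initializer)
    println(w dot x)                     // the Vector operations themselves are unchanged by the move
  }
}
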
diff --git a/examples/src/main/scala/spark/examples/LocalFileLR.scala b/examples/src/main/scala/spark/examples/LocalFileLR.scala
index b819fe80fe..f958ef9f72 100644
--- a/examples/src/main/scala/spark/examples/LocalFileLR.scala
+++ b/examples/src/main/scala/spark/examples/LocalFileLR.scala
@@ -1,7 +1,7 @@
 package spark.examples
 
 import java.util.Random
-import Vector._
+import spark.util.Vector
 
 object LocalFileLR {
   val D = 10 // Numer of dimensions
diff --git a/examples/src/main/scala/spark/examples/LocalKMeans.scala b/examples/src/main/scala/spark/examples/LocalKMeans.scala
index 7e8e7a6959..b442c604cd 100644
--- a/examples/src/main/scala/spark/examples/LocalKMeans.scala
+++ b/examples/src/main/scala/spark/examples/LocalKMeans.scala
@@ -1,8 +1,7 @@
 package spark.examples
 
 import java.util.Random
-import Vector._
-import spark.SparkContext
+import spark.util.Vector
 import spark.SparkContext._
 import scala.collection.mutable.HashMap
 import scala.collection.mutable.HashSet
diff --git a/examples/src/main/scala/spark/examples/LocalLR.scala b/examples/src/main/scala/spark/examples/LocalLR.scala
index 72c5009109..f2ac2b3e06 100644
--- a/examples/src/main/scala/spark/examples/LocalLR.scala
+++ b/examples/src/main/scala/spark/examples/LocalLR.scala
@@ -1,7 +1,7 @@
 package spark.examples
 
 import java.util.Random
-import Vector._
+import spark.util.Vector
 
 object LocalLR {
   val N = 10000 // Number of data points
diff --git a/examples/src/main/scala/spark/examples/SparkHdfsLR.scala b/examples/src/main/scala/spark/examples/SparkHdfsLR.scala
index 13b6ec1d3f..5b2bc84d69 100644
--- a/examples/src/main/scala/spark/examples/SparkHdfsLR.scala
+++ b/examples/src/main/scala/spark/examples/SparkHdfsLR.scala
@@ -2,7 +2,7 @@ package spark.examples
 
 import java.util.Random
 import scala.math.exp
-import Vector._
+import spark.util.Vector
 import spark._
 
 object SparkHdfsLR {
diff --git a/examples/src/main/scala/spark/examples/SparkKMeans.scala b/examples/src/main/scala/spark/examples/SparkKMeans.scala
index 5eb1c95a16..adce551322 100644
--- a/examples/src/main/scala/spark/examples/SparkKMeans.scala
+++ b/examples/src/main/scala/spark/examples/SparkKMeans.scala
@@ -1,8 +1,8 @@
 package spark.examples
 
 import java.util.Random
-import Vector._
 import spark.SparkContext
+import spark.util.Vector
 import spark.SparkContext._
 import scala.collection.mutable.HashMap
 import scala.collection.mutable.HashSet
diff --git a/examples/src/main/scala/spark/examples/SparkLR.scala b/examples/src/main/scala/spark/examples/SparkLR.scala
index 7715e5a713..19123db738 100644
--- a/examples/src/main/scala/spark/examples/SparkLR.scala
+++ b/examples/src/main/scala/spark/examples/SparkLR.scala
@@ -2,7 +2,7 @@ package spark.examples
 
 import java.util.Random
 import scala.math.exp
-import Vector._
+import spark.util.Vector
 import spark._
 
 object SparkLR {
diff --git a/examples/src/main/scala/spark/examples/Vector.scala b/examples/src/main/scala/spark/examples/Vector.scala
deleted file mode 100644
index 2abccbafce..0000000000
--- a/examples/src/main/scala/spark/examples/Vector.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-package spark.examples
-
-class Vector(val elements: Array[Double]) extends Serializable {
-  def length = elements.length
-
-  def apply(index: Int) = elements(index)
-
-  def + (other: Vector): Vector = {
-    if (length != other.length)
-      throw new IllegalArgumentException("Vectors of different length")
-    return Vector(length, i => this(i) + other(i))
-  }
-
-  def - (other: Vector): Vector = {
-    if (length != other.length)
-      throw new IllegalArgumentException("Vectors of different length")
-    return Vector(length, i => this(i) - other(i))
-  }
-
-  def dot(other: Vector): Double = {
-    if (length != other.length)
-      throw new IllegalArgumentException("Vectors of different length")
-    var ans = 0.0
-    var i = 0
-    while (i < length) {
-      ans += this(i) * other(i)
-      i += 1
-    }
-    return ans
-  }
-
-  def * (scale: Double): Vector = Vector(length, i => this(i) * scale)
-
-  def / (d: Double): Vector = this * (1 / d)
-
-  def unary_- = this * -1
-
-  def sum = elements.reduceLeft(_ + _)
-
-  def squaredDist(other: Vector): Double = {
-    var ans = 0.0
-    var i = 0
-    while (i < length) {
-      ans += (this(i) - other(i)) * (this(i) - other(i))
-      i += 1
-    }
-    return ans
-  }
-
-  def dist(other: Vector): Double = math.sqrt(squaredDist(other))
-
-  override def toString = elements.mkString("(", ", ", ")")
-}
-
-object Vector {
-  def apply(elements: Array[Double]) = new Vector(elements)
-
-  def apply(elements: Double*) = new Vector(elements.toArray)
-
-  def apply(length: Int, initializer: Int => Double): Vector = {
-    val elements = new Array[Double](length)
-    for (i <- 0 until length)
-      elements(i) = initializer(i)
-    return new Vector(elements)
-  }
-
-  def zeros(length: Int) = new Vector(new Array[Double](length))
-
-  def ones(length: Int) = Vector(length, _ => 1)
-
-  class Multiplier(num: Double) {
-    def * (vec: Vector) = vec * num
-  }
-
-  implicit def doubleToMultiplier(num: Double) = new Multiplier(num)
-
-  implicit object VectorAccumParam extends spark.AccumulatorParam[Vector] {
-    def addInPlace(t1: Vector, t2: Vector) = t1 + t2
-    def zero(initialValue: Vector) = Vector.zeros(initialValue.length)
-  }
-}
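
For reference, the deleted class above also carries two implicits that the examples depend on: doubleToMultiplier, which lets a Double appear on the left-hand side of *, and VectorAccumParam, which lets a Vector serve as the value of a Spark accumulator (the pattern used by SparkLR and SparkKMeans). The sketch below exercises that surface, assuming the relocated spark.util.Vector keeps the same members as the file removed here; the object name VectorApiSketch is illustrative only and the snippet needs the Spark jar on the classpath:

package spark.examples

import spark.util.Vector
import spark.util.Vector._   // brings doubleToMultiplier (and VectorAccumParam) into implicit scope

object VectorApiSketch {
  def main(args: Array[String]) {
    val v = Vector(1.0, 2.0, 3.0)
    val w = Vector.ones(3)

    println(v + w)              // element-wise addition: (2.0, 3.0, 4.0)
    println(v - w)              // element-wise subtraction
    println(v * 2.0)            // Vector * Double, defined on the class
    println(2.0 * v)            // Double * Vector, via the Multiplier implicit
    println(v / 2.0)            // scalar division
    println(v dot w)            // 6.0
    println(v.squaredDist(w))   // sum of squared element-wise differences
    println(v dist w)           // Euclidean distance
    println(-v)                 // unary negation
    println(Vector.zeros(3))    // (0.0, 0.0, 0.0)
  }
}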