From 574c0d28c2e859a6ed31a2c193cf04e0aa7404a5 Mon Sep 17 00:00:00 2001
From: Ankur Dave
Date: Sat, 11 Jan 2014 12:38:35 -0800
Subject: Use SparkConf in GraphX tests (via LocalSparkContext)

---
 .../test/scala/org/apache/spark/graphx/LocalSparkContext.scala | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

(limited to 'graphx/src')

diff --git a/graphx/src/test/scala/org/apache/spark/graphx/LocalSparkContext.scala b/graphx/src/test/scala/org/apache/spark/graphx/LocalSparkContext.scala
index 6aec2ea8a9..aa9ba84084 100644
--- a/graphx/src/test/scala/org/apache/spark/graphx/LocalSparkContext.scala
+++ b/graphx/src/test/scala/org/apache/spark/graphx/LocalSparkContext.scala
@@ -3,20 +3,20 @@ package org.apache.spark.graphx
 import org.scalatest.Suite
 import org.scalatest.BeforeAndAfterEach
 
+import org.apache.spark.SparkConf
 import org.apache.spark.SparkContext
 
-
 /**
  * Provides a method to run tests against a {@link SparkContext} variable that is correctly stopped
  * after each test.
  */
 trait LocalSparkContext {
-  System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
-  System.setProperty("spark.kryo.registrator", "org.apache.spark.graphx.GraphKryoRegistrator")
-
   /** Runs `f` on a new SparkContext and ensures that it is stopped afterwards. */
   def withSpark[T](f: SparkContext => T) = {
-    val sc = new SparkContext("local", "test")
+    val conf = new SparkConf()
+      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
+      .set("spark.kryo.registrator", "org.apache.spark.graphx.GraphKryoRegistrator")
+    val sc = new SparkContext("local", "test", conf)
    try {
      f(sc)
    } finally {
--
cgit v1.2.3
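
For reference, a minimal sketch of how a GraphX suite consumes this trait after the change. The suite name and the toy graph below are hypothetical and only illustrate the usage pattern: a test mixes in LocalSparkContext and wraps its body in withSpark, which now builds the Kryo-configured SparkConf itself instead of relying on per-JVM System.setProperty calls.

import org.scalatest.FunSuite

import org.apache.spark.graphx._

// Hypothetical suite, for illustration only: mixing in LocalSparkContext and
// wrapping the test body in withSpark yields a local SparkContext carrying the
// Kryo settings from the trait, and the context is stopped when the body returns.
class ExampleGraphSuite extends FunSuite with LocalSparkContext {
  test("triangle graph has three vertices") {
    withSpark { sc =>
      // Build a 3-cycle from raw edge tuples; vertex attributes default to 1.
      val edges = sc.parallelize(Seq((1L, 2L), (2L, 3L), (3L, 1L)))
      val graph = Graph.fromEdgeTuples(edges, defaultValue = 1)
      assert(graph.vertices.count() === 3)
    }
  }
}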