author     Matei Zaharia <matei@eecs.berkeley.edu>   2013-08-31 19:27:07 -0700
committer  Matei Zaharia <matei@eecs.berkeley.edu>   2013-09-01 14:13:13 -0700
commit     46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef (patch)
tree       4a46971b36680bc5ef51be81ada8eb47670f6b22
parent     a30fac16ca0525f2001b127e5f9518c9680844c9 (diff)
download   spark-46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef.tar.gz
           spark-46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef.tar.bz2
           spark-46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef.zip
Initial work to rename package to org.apache.spark
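In practical terms, this commit moves every class from the old top-level `spark` package to `org.apache.spark`, as the renames below show, so downstream programs only need their imports updated. A minimal sketch of a hypothetical user program after the rename (illustration only; the object name and input path are made up, not code from this commit):

    // Before this commit a user program imported from the top-level package:
    //   import spark.SparkContext
    //   import spark.SparkContext._
    // After it, the same program imports from org.apache.spark:
    import org.apache.spark.SparkContext
    import org.apache.spark.SparkContext._  // implicit conversions, e.g. for reduceByKey

    object WordCountAfterRename {
      def main(args: Array[String]) {
        // 0.8-era constructor: (master URL, application name)
        val sc = new SparkContext("local", "Word Count")
        val counts = sc.textFile("README.md")
          .flatMap(_.split(" "))
          .map(word => (word, 1))
          .reduceByKey(_ + _)
        counts.collect().foreach(println)
        sc.stop()
      }
    }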
Diffstat:
-rw-r--r--  README.md | 2
-rw-r--r--  assembly/pom.xml | 16
-rw-r--r--  assembly/src/main/assembly/assembly.xml | 10
-rw-r--r--  bagel/pom.xml | 6
-rw-r--r--  bagel/src/main/scala/org/apache/spark/bagel/Bagel.scala (renamed from bagel/src/main/scala/spark/bagel/Bagel.scala) | 35
-rw-r--r--  bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala (renamed from bagel/src/test/scala/bagel/BagelSuite.scala) | 10
-rwxr-xr-x  bin/start-master.sh | 2
-rwxr-xr-x  bin/start-slave.sh | 2
-rwxr-xr-x  bin/stop-master.sh | 2
-rwxr-xr-x  bin/stop-slaves.sh | 4
-rw-r--r--  core/pom.xml | 4
-rw-r--r--  core/src/main/java/org/apache/spark/network/netty/FileClient.java (renamed from core/src/main/java/spark/network/netty/FileClient.java) | 2
-rw-r--r--  core/src/main/java/org/apache/spark/network/netty/FileClientChannelInitializer.java (renamed from core/src/main/java/spark/network/netty/FileClientChannelInitializer.java) | 2
-rw-r--r--  core/src/main/java/org/apache/spark/network/netty/FileClientHandler.java (renamed from core/src/main/java/spark/network/netty/FileClientHandler.java) | 2
-rw-r--r--  core/src/main/java/org/apache/spark/network/netty/FileServer.java (renamed from core/src/main/java/spark/network/netty/FileServer.java) | 2
-rw-r--r--  core/src/main/java/org/apache/spark/network/netty/FileServerChannelInitializer.java (renamed from core/src/main/java/spark/network/netty/FileServerChannelInitializer.java) | 2
-rw-r--r--  core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java (renamed from core/src/main/java/spark/network/netty/FileServerHandler.java) | 2
-rwxr-xr-x  core/src/main/java/org/apache/spark/network/netty/PathResolver.java (renamed from core/src/main/java/spark/network/netty/PathResolver.java) | 2
-rwxr-xr-x  core/src/main/resources/org/apache/spark/ui/static/bootstrap.min.css (renamed from core/src/main/resources/spark/ui/static/bootstrap.min.css) | 0
-rw-r--r--  core/src/main/resources/org/apache/spark/ui/static/sorttable.js (renamed from core/src/main/resources/spark/ui/static/sorttable.js) | 0
-rw-r--r--  core/src/main/resources/org/apache/spark/ui/static/spark-logo-77x50px-hd.png (renamed from core/src/main/resources/spark/ui/static/spark-logo-77x50px-hd.png) | bin 3536 -> 3536 bytes
-rw-r--r--  core/src/main/resources/org/apache/spark/ui/static/spark_logo.png (renamed from core/src/main/resources/spark/ui/static/spark_logo.png) | bin 14233 -> 14233 bytes
-rw-r--r--  core/src/main/resources/org/apache/spark/ui/static/webui.css (renamed from core/src/main/resources/spark/ui/static/webui.css) | 0
-rw-r--r--  core/src/main/scala/org/apache/spark/Accumulators.scala (renamed from core/src/main/scala/spark/Accumulators.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/Aggregator.scala (renamed from core/src/main/scala/spark/Aggregator.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala (renamed from core/src/main/scala/spark/BlockStoreShuffleFetcher.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/CacheManager.scala (renamed from core/src/main/scala/spark/CacheManager.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/ClosureCleaner.scala (renamed from core/src/main/scala/spark/ClosureCleaner.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/Dependency.scala (renamed from core/src/main/scala/spark/Dependency.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/DoubleRDDFunctions.scala (renamed from core/src/main/scala/spark/DoubleRDDFunctions.scala) | 14
-rw-r--r--  core/src/main/scala/org/apache/spark/FetchFailedException.scala (renamed from core/src/main/scala/spark/FetchFailedException.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/HttpFileServer.scala (renamed from core/src/main/scala/spark/HttpFileServer.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/HttpServer.scala (renamed from core/src/main/scala/spark/HttpServer.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/JavaSerializer.scala (renamed from core/src/main/scala/spark/JavaSerializer.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/KryoSerializer.scala (renamed from core/src/main/scala/spark/KryoSerializer.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/Logging.scala (renamed from core/src/main/scala/spark/Logging.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/MapOutputTracker.scala (renamed from core/src/main/scala/spark/MapOutputTracker.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/PairRDDFunctions.scala (renamed from core/src/main/scala/spark/PairRDDFunctions.scala) | 16
-rw-r--r--  core/src/main/scala/org/apache/spark/Partition.scala (renamed from core/src/main/scala/spark/Partition.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/Partitioner.scala (renamed from core/src/main/scala/spark/Partitioner.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/RDD.scala (renamed from core/src/main/scala/spark/RDD.scala) | 62
-rw-r--r--  core/src/main/scala/org/apache/spark/RDDCheckpointData.scala (renamed from core/src/main/scala/spark/RDDCheckpointData.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/SequenceFileRDDFunctions.scala (renamed from core/src/main/scala/spark/SequenceFileRDDFunctions.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/SerializableWritable.scala (renamed from core/src/main/scala/spark/SerializableWritable.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/ShuffleFetcher.scala (renamed from core/src/main/scala/spark/ShuffleFetcher.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/SizeEstimator.scala (renamed from core/src/main/scala/spark/SizeEstimator.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkContext.scala (renamed from core/src/main/scala/spark/SparkContext.scala) | 34
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkEnv.scala (renamed from core/src/main/scala/spark/SparkEnv.scala) | 27
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkException.scala (renamed from core/src/main/scala/spark/SparkException.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkFiles.java (renamed from core/src/main/scala/spark/SparkFiles.java) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala (renamed from core/src/main/scala/spark/SparkHadoopWriter.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/TaskContext.scala (renamed from core/src/main/scala/spark/TaskContext.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/TaskEndReason.scala (renamed from core/src/main/scala/spark/TaskEndReason.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/TaskState.scala (renamed from core/src/main/scala/spark/TaskState.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/Utils.scala (renamed from core/src/main/scala/spark/Utils.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala (renamed from core/src/main/scala/spark/api/java/JavaDoubleRDD.scala) | 20
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala (renamed from core/src/main/scala/spark/api/java/JavaPairRDD.scala) | 24
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala (renamed from core/src/main/scala/spark/api/java/JavaRDD.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala (renamed from core/src/main/scala/spark/api/java/JavaRDDLike.scala) | 14
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala (renamed from core/src/main/scala/spark/api/java/JavaSparkContext.scala) | 26
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java (renamed from core/src/main/scala/spark/api/java/JavaSparkContextVarargsWorkaround.java) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/JavaUtils.scala (renamed from core/src/main/scala/spark/api/java/JavaUtils.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/StorageLevels.java (renamed from core/src/main/scala/spark/api/java/StorageLevels.java) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java (renamed from core/src/main/scala/spark/api/java/function/DoubleFlatMapFunction.java) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java (renamed from core/src/main/scala/spark/api/java/function/DoubleFunction.java) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/function/FlatMapFunction.scala (renamed from core/src/main/scala/spark/api/java/function/FlatMapFunction.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/function/FlatMapFunction2.scala (renamed from core/src/main/scala/spark/api/java/function/FlatMapFunction2.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/function/Function.java (renamed from core/src/main/scala/spark/api/java/function/Function.java) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/function/Function2.java (renamed from core/src/main/scala/spark/api/java/function/Function2.java) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java (renamed from core/src/main/scala/spark/api/java/function/PairFlatMapFunction.java) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java (renamed from core/src/main/scala/spark/api/java/function/PairFunction.java) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/function/VoidFunction.scala (renamed from core/src/main/scala/spark/api/java/function/VoidFunction.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/function/WrappedFunction1.scala (renamed from core/src/main/scala/spark/api/java/function/WrappedFunction1.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/api/java/function/WrappedFunction2.scala (renamed from core/src/main/scala/spark/api/java/function/WrappedFunction2.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala (renamed from core/src/main/scala/spark/api/python/PythonPartitioner.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala (renamed from core/src/main/scala/spark/api/python/PythonRDD.scala) | 12
-rw-r--r--  core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala (renamed from core/src/main/scala/spark/api/python/PythonWorkerFactory.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/broadcast/BitTorrentBroadcast.scala (renamed from core/src/main/scala/spark/broadcast/BitTorrentBroadcast.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/broadcast/Broadcast.scala (renamed from core/src/main/scala/spark/broadcast/Broadcast.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala (renamed from core/src/main/scala/spark/broadcast/BroadcastFactory.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala (renamed from core/src/main/scala/spark/broadcast/HttpBroadcast.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/broadcast/MultiTracker.scala (renamed from core/src/main/scala/spark/broadcast/MultiTracker.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/broadcast/SourceInfo.scala (renamed from core/src/main/scala/spark/broadcast/SourceInfo.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/broadcast/TreeBroadcast.scala (renamed from core/src/main/scala/spark/broadcast/TreeBroadcast.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/ApplicationDescription.scala (renamed from core/src/main/scala/spark/deploy/ApplicationDescription.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/Command.scala (renamed from core/src/main/scala/spark/deploy/Command.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/DeployMessage.scala (renamed from core/src/main/scala/spark/deploy/DeployMessage.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/ExecutorState.scala (renamed from core/src/main/scala/spark/deploy/ExecutorState.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala (renamed from core/src/main/scala/spark/deploy/JsonProtocol.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala (renamed from core/src/main/scala/spark/deploy/LocalSparkCluster.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala (renamed from core/src/main/scala/spark/deploy/SparkHadoopUtil.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/WebUI.scala (renamed from core/src/main/scala/spark/deploy/WebUI.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/client/Client.scala (renamed from core/src/main/scala/spark/deploy/client/Client.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/client/ClientListener.scala (renamed from core/src/main/scala/spark/deploy/client/ClientListener.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala (renamed from core/src/main/scala/spark/deploy/client/TestClient.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/client/TestExecutor.scala (renamed from core/src/main/scala/spark/deploy/client/TestExecutor.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala (renamed from core/src/main/scala/spark/deploy/master/ApplicationInfo.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/ApplicationSource.scala (renamed from core/src/main/scala/spark/deploy/master/ApplicationSource.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/ApplicationState.scala (renamed from core/src/main/scala/spark/deploy/master/ApplicationState.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/ExecutorInfo.scala (renamed from core/src/main/scala/spark/deploy/master/ExecutorInfo.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/Master.scala (renamed from core/src/main/scala/spark/deploy/master/Master.scala) | 14
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala (renamed from core/src/main/scala/spark/deploy/master/MasterArguments.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/MasterSource.scala (renamed from core/src/main/scala/spark/deploy/master/MasterSource.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/WorkerInfo.scala (renamed from core/src/main/scala/spark/deploy/master/WorkerInfo.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/WorkerState.scala (renamed from core/src/main/scala/spark/deploy/master/WorkerState.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala (renamed from core/src/main/scala/spark/deploy/master/ui/ApplicationPage.scala) | 12
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala (renamed from core/src/main/scala/spark/deploy/master/ui/IndexPage.scala) | 14
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala (renamed from core/src/main/scala/spark/deploy/master/ui/MasterWebUI.scala) | 12
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala (renamed from core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala (renamed from core/src/main/scala/spark/deploy/worker/Worker.scala) | 16
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala (renamed from core/src/main/scala/spark/deploy/worker/WorkerArguments.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/worker/WorkerSource.scala (renamed from core/src/main/scala/spark/deploy/worker/WorkerSource.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/worker/ui/IndexPage.scala (renamed from core/src/main/scala/spark/deploy/worker/ui/IndexPage.scala) | 12
-rw-r--r--  core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala (renamed from core/src/main/scala/spark/deploy/worker/ui/WorkerWebUI.scala) | 14
-rw-r--r--  core/src/main/scala/org/apache/spark/executor/Executor.scala (renamed from core/src/main/scala/spark/executor/Executor.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala (renamed from core/src/main/scala/spark/executor/ExecutorBackend.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/executor/ExecutorExitCode.scala (renamed from core/src/main/scala/spark/executor/ExecutorExitCode.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala (renamed from core/src/main/scala/spark/executor/ExecutorSource.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/executor/ExecutorURLClassLoader.scala (renamed from core/src/main/scala/spark/executor/ExecutorURLClassLoader.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala (renamed from core/src/main/scala/spark/executor/MesosExecutorBackend.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/executor/StandaloneExecutorBackend.scala (renamed from core/src/main/scala/spark/executor/StandaloneExecutorBackend.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala (renamed from core/src/main/scala/spark/executor/TaskMetrics.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/io/CompressionCodec.scala (renamed from core/src/main/scala/spark/io/CompressionCodec.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala (renamed from core/src/main/scala/spark/metrics/MetricsConfig.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala (renamed from core/src/main/scala/spark/metrics/MetricsSystem.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala (renamed from core/src/main/scala/spark/metrics/sink/ConsoleSink.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala (renamed from core/src/main/scala/spark/metrics/sink/CsvSink.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala (renamed from core/src/main/scala/spark/metrics/sink/JmxSink.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala (renamed from core/src/main/scala/spark/metrics/sink/MetricsServlet.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/sink/Sink.scala (renamed from core/src/main/scala/spark/metrics/sink/Sink.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/source/JvmSource.scala (renamed from core/src/main/scala/spark/metrics/source/JvmSource.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/metrics/source/Source.scala (renamed from core/src/main/scala/spark/metrics/source/Source.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/network/BufferMessage.scala (renamed from core/src/main/scala/spark/network/BufferMessage.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/network/Connection.scala (renamed from core/src/main/scala/spark/network/Connection.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/network/ConnectionManager.scala (renamed from core/src/main/scala/spark/network/ConnectionManager.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala (renamed from core/src/main/scala/spark/network/ConnectionManagerId.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala (renamed from core/src/main/scala/spark/network/ConnectionManagerTest.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/network/Message.scala (renamed from core/src/main/scala/spark/network/Message.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/network/MessageChunk.scala (renamed from core/src/main/scala/spark/network/MessageChunk.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala (renamed from core/src/main/scala/spark/network/MessageChunkHeader.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/network/ReceiverTest.scala (renamed from core/src/main/scala/spark/network/ReceiverTest.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/network/SenderTest.scala (renamed from core/src/main/scala/spark/network/SenderTest.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala (renamed from core/src/main/scala/spark/network/netty/FileHeader.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala (renamed from core/src/main/scala/spark/network/netty/ShuffleCopier.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala (renamed from core/src/main/scala/spark/network/netty/ShuffleSender.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/package.scala (renamed from core/src/main/scala/spark/package.scala) | 12
-rw-r--r--  core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala (renamed from core/src/main/scala/spark/partial/ApproximateActionListener.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/partial/ApproximateEvaluator.scala (renamed from core/src/main/scala/spark/partial/ApproximateEvaluator.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala (renamed from core/src/main/scala/spark/partial/BoundedDouble.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/partial/CountEvaluator.scala (renamed from core/src/main/scala/spark/partial/CountEvaluator.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala (renamed from core/src/main/scala/spark/partial/GroupedCountEvaluator.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala (renamed from core/src/main/scala/spark/partial/GroupedMeanEvaluator.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala (renamed from core/src/main/scala/spark/partial/GroupedSumEvaluator.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/partial/MeanEvaluator.scala (renamed from core/src/main/scala/spark/partial/MeanEvaluator.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/partial/PartialResult.scala (renamed from core/src/main/scala/spark/partial/PartialResult.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/partial/StudentTCacher.scala (renamed from core/src/main/scala/spark/partial/StudentTCacher.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/partial/SumEvaluator.scala (renamed from core/src/main/scala/spark/partial/SumEvaluator.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala (renamed from core/src/main/scala/spark/rdd/BlockRDD.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala (renamed from core/src/main/scala/spark/rdd/CartesianRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala (renamed from core/src/main/scala/spark/rdd/CheckpointRDD.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala (renamed from core/src/main/scala/spark/rdd/CoGroupedRDD.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala (renamed from core/src/main/scala/spark/rdd/CoalescedRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/EmptyRDD.scala (renamed from core/src/main/scala/spark/rdd/EmptyRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala (renamed from core/src/main/scala/spark/rdd/FilteredRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala (renamed from core/src/main/scala/spark/rdd/FlatMappedRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala (renamed from core/src/main/scala/spark/rdd/FlatMappedValuesRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala (renamed from core/src/main/scala/spark/rdd/GlommedRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala (renamed from core/src/main/scala/spark/rdd/HadoopRDD.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala (renamed from core/src/main/scala/spark/rdd/JdbcRDD.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala (renamed from core/src/main/scala/spark/rdd/MapPartitionsRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/MapPartitionsWithIndexRDD.scala (renamed from core/src/main/scala/spark/rdd/MapPartitionsWithIndexRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/MappedRDD.scala (renamed from core/src/main/scala/spark/rdd/MappedRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala (renamed from core/src/main/scala/spark/rdd/MappedValuesRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala (renamed from core/src/main/scala/spark/rdd/NewHadoopRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/OrderedRDDFunctions.scala (renamed from core/src/main/scala/spark/rdd/OrderedRDDFunctions.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala (renamed from core/src/main/scala/spark/rdd/ParallelCollectionRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala (renamed from core/src/main/scala/spark/rdd/PartitionPruningRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala (renamed from core/src/main/scala/spark/rdd/PipedRDD.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala (renamed from core/src/main/scala/spark/rdd/SampledRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala (renamed from core/src/main/scala/spark/rdd/ShuffledRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala (renamed from core/src/main/scala/spark/rdd/SubtractedRDD.scala) | 18
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala (renamed from core/src/main/scala/spark/rdd/UnionRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala (renamed from core/src/main/scala/spark/rdd/ZippedPartitionsRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala (renamed from core/src/main/scala/spark/rdd/ZippedRDD.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala (renamed from core/src/main/scala/spark/scheduler/ActiveJob.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala (renamed from core/src/main/scala/spark/scheduler/DAGScheduler.scala) | 14
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala (renamed from core/src/main/scala/spark/scheduler/DAGSchedulerEvent.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala (renamed from core/src/main/scala/spark/scheduler/DAGSchedulerSource.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala (renamed from core/src/main/scala/spark/scheduler/InputFormatInfo.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/JobListener.scala (renamed from core/src/main/scala/spark/scheduler/JobListener.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala (renamed from core/src/main/scala/spark/scheduler/JobLogger.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/JobResult.scala (renamed from core/src/main/scala/spark/scheduler/JobResult.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/JobWaiter.scala (renamed from core/src/main/scala/spark/scheduler/JobWaiter.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala (renamed from core/src/main/scala/spark/scheduler/MapStatus.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala (renamed from core/src/main/scala/spark/scheduler/ResultTask.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala (renamed from core/src/main/scala/spark/scheduler/ShuffleMapTask.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala (renamed from core/src/main/scala/spark/scheduler/SparkListener.scala) | 12
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/SparkListenerBus.scala (renamed from core/src/main/scala/spark/scheduler/SparkListenerBus.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/SplitInfo.scala (renamed from core/src/main/scala/spark/scheduler/SplitInfo.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/Stage.scala (renamed from core/src/main/scala/spark/scheduler/Stage.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/StageInfo.scala (renamed from core/src/main/scala/spark/scheduler/StageInfo.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/Task.scala (renamed from core/src/main/scala/spark/scheduler/Task.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/TaskLocation.scala (renamed from core/src/main/scala/spark/scheduler/TaskLocation.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala (renamed from core/src/main/scala/spark/scheduler/TaskResult.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala (renamed from core/src/main/scala/spark/scheduler/TaskScheduler.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerListener.scala (renamed from core/src/main/scala/spark/scheduler/TaskSchedulerListener.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/TaskSet.scala (renamed from core/src/main/scala/spark/scheduler/TaskSet.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/ClusterScheduler.scala (renamed from core/src/main/scala/spark/scheduler/cluster/ClusterScheduler.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManager.scala (renamed from core/src/main/scala/spark/scheduler/cluster/ClusterTaskSetManager.scala) | 18
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/ExecutorLossReason.scala (renamed from core/src/main/scala/spark/scheduler/cluster/ExecutorLossReason.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/Pool.scala (renamed from core/src/main/scala/spark/scheduler/cluster/Pool.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/Schedulable.scala (renamed from core/src/main/scala/spark/scheduler/cluster/Schedulable.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulableBuilder.scala (renamed from core/src/main/scala/spark/scheduler/cluster/SchedulableBuilder.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulerBackend.scala (renamed from core/src/main/scala/spark/scheduler/cluster/SchedulerBackend.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulingAlgorithm.scala (renamed from core/src/main/scala/spark/scheduler/cluster/SchedulingAlgorithm.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulingMode.scala (renamed from core/src/main/scala/spark/scheduler/cluster/SchedulingMode.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala (renamed from core/src/main/scala/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala) | 11
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneClusterMessage.scala (renamed from core/src/main/scala/spark/scheduler/cluster/StandaloneClusterMessage.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala (renamed from core/src/main/scala/spark/scheduler/cluster/StandaloneSchedulerBackend.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/TaskDescription.scala (renamed from core/src/main/scala/spark/scheduler/cluster/TaskDescription.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/TaskInfo.scala (renamed from core/src/main/scala/spark/scheduler/cluster/TaskInfo.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/TaskLocality.scala (renamed from core/src/main/scala/spark/scheduler/cluster/TaskLocality.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/TaskSetManager.scala (renamed from core/src/main/scala/spark/scheduler/cluster/TaskSetManager.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/cluster/WorkerOffer.scala (renamed from core/src/main/scala/spark/scheduler/cluster/WorkerOffer.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/local/LocalScheduler.scala (renamed from core/src/main/scala/spark/scheduler/local/LocalScheduler.scala) | 14
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/local/LocalTaskSetManager.scala (renamed from core/src/main/scala/spark/scheduler/local/LocalTaskSetManager.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/mesos/CoarseMesosSchedulerBackend.scala (renamed from core/src/main/scala/spark/scheduler/mesos/CoarseMesosSchedulerBackend.scala) | 18
-rw-r--r--  core/src/main/scala/org/apache/spark/scheduler/mesos/MesosSchedulerBackend.scala (renamed from core/src/main/scala/spark/scheduler/mesos/MesosSchedulerBackend.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/serializer/Serializer.scala (renamed from core/src/main/scala/spark/serializer/Serializer.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala (renamed from core/src/main/scala/spark/serializer/SerializerManager.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockException.scala (renamed from core/src/main/scala/spark/storage/BlockException.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockFetchTracker.scala (renamed from core/src/main/scala/spark/storage/BlockFetchTracker.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala (renamed from core/src/main/scala/spark/storage/BlockFetcherIterator.scala) | 16
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockManager.scala (renamed from core/src/main/scala/spark/storage/BlockManager.scala) | 12
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala (renamed from core/src/main/scala/spark/storage/BlockManagerId.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala (renamed from core/src/main/scala/spark/storage/BlockManagerMaster.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala (renamed from core/src/main/scala/spark/storage/BlockManagerMasterActor.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala (renamed from core/src/main/scala/spark/storage/BlockManagerMessages.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala (renamed from core/src/main/scala/spark/storage/BlockManagerSlaveActor.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala (renamed from core/src/main/scala/spark/storage/BlockManagerSource.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala (renamed from core/src/main/scala/spark/storage/BlockManagerWorker.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockMessage.scala (renamed from core/src/main/scala/spark/storage/BlockMessage.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockMessageArray.scala (renamed from core/src/main/scala/spark/storage/BlockMessageArray.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala (renamed from core/src/main/scala/spark/storage/BlockObjectWriter.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/BlockStore.scala (renamed from core/src/main/scala/spark/storage/BlockStore.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/DiskStore.scala (renamed from core/src/main/scala/spark/storage/DiskStore.scala) | 14
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/MemoryStore.scala (renamed from core/src/main/scala/spark/storage/MemoryStore.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/PutResult.scala (renamed from core/src/main/scala/spark/storage/PutResult.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala (renamed from core/src/main/scala/spark/storage/ShuffleBlockManager.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/StorageLevel.scala (renamed from core/src/main/scala/spark/storage/StorageLevel.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/StorageUtils.scala (renamed from core/src/main/scala/spark/storage/StorageUtils.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala (renamed from core/src/main/scala/spark/storage/ThreadingTest.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/JettyUtils.scala (renamed from core/src/main/scala/spark/ui/JettyUtils.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/Page.scala (renamed from core/src/main/scala/spark/ui/Page.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/SparkUI.scala (renamed from core/src/main/scala/spark/ui/SparkUI.scala) | 16
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/UIUtils.scala (renamed from core/src/main/scala/spark/ui/UIUtils.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala (renamed from core/src/main/scala/spark/ui/UIWorkloadGenerator.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala (renamed from core/src/main/scala/spark/ui/env/EnvironmentUI.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala (renamed from core/src/main/scala/spark/ui/exec/ExecutorsUI.scala) | 16
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala (renamed from core/src/main/scala/spark/ui/jobs/IndexPage.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala (renamed from core/src/main/scala/spark/ui/jobs/JobProgressListener.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala (renamed from core/src/main/scala/spark/ui/jobs/JobProgressUI.scala) | 12
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala (renamed from core/src/main/scala/spark/ui/jobs/PoolPage.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/jobs/PoolTable.scala (renamed from core/src/main/scala/spark/ui/jobs/PoolTable.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala (renamed from core/src/main/scala/spark/ui/jobs/StagePage.scala) | 14
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala (renamed from core/src/main/scala/spark/ui/jobs/StageTable.scala) | 8
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala (renamed from core/src/main/scala/spark/ui/storage/BlockManagerUI.scala) | 6
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala (renamed from core/src/main/scala/spark/ui/storage/IndexPage.scala) | 10
-rw-r--r--  core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala (renamed from core/src/main/scala/spark/ui/storage/RDDPage.scala) | 12
-rw-r--r--  core/src/main/scala/org/apache/spark/util/AkkaUtils.scala (renamed from core/src/main/scala/spark/util/AkkaUtils.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala (renamed from core/src/main/scala/spark/util/BoundedPriorityQueue.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala (renamed from core/src/main/scala/spark/util/ByteBufferInputStream.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Clock.scala (renamed from core/src/main/scala/spark/util/Clock.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/CompletionIterator.scala (renamed from core/src/main/scala/spark/util/CompletionIterator.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Distribution.scala (renamed from core/src/main/scala/spark/util/Distribution.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/IdGenerator.scala (renamed from core/src/main/scala/spark/util/IdGenerator.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/IntParam.scala (renamed from core/src/main/scala/spark/util/IntParam.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/MemoryParam.scala (renamed from core/src/main/scala/spark/util/MemoryParam.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala (renamed from core/src/main/scala/spark/util/MetadataCleaner.scala) | 4
-rw-r--r--  core/src/main/scala/org/apache/spark/util/MutablePair.scala (renamed from core/src/main/scala/spark/util/MutablePair.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/NextIterator.scala (renamed from core/src/main/scala/spark/util/NextIterator.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/RateLimitedOutputStream.scala (renamed from core/src/main/scala/spark/util/RateLimitedOutputStream.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala (renamed from core/src/main/scala/spark/util/SerializableBuffer.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/StatCounter.scala (renamed from core/src/main/scala/spark/util/StatCounter.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala (renamed from core/src/main/scala/spark/util/TimeStampedHashMap.scala) | 7
-rw-r--r--  core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala (renamed from core/src/main/scala/spark/util/TimeStampedHashSet.scala) | 2
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Vector.scala (renamed from core/src/main/scala/spark/util/Vector.scala) | 4
-rw-r--r--  core/src/test/resources/test_metrics_config.properties | 2
-rw-r--r--  core/src/test/resources/test_metrics_system.properties | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/AccumulatorSuite.scala (renamed from core/src/test/scala/spark/AccumulatorSuite.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/BroadcastSuite.scala (renamed from core/src/test/scala/spark/BroadcastSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/CheckpointSuite.scala (renamed from core/src/test/scala/spark/CheckpointSuite.scala) | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/ClosureCleanerSuite.scala (renamed from core/src/test/scala/spark/ClosureCleanerSuite.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/DistributedSuite.scala (renamed from core/src/test/scala/spark/DistributedSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/DriverSuite.scala (renamed from core/src/test/scala/spark/DriverSuite.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/FailureSuite.scala (renamed from core/src/test/scala/spark/FailureSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/FileServerSuite.scala (renamed from core/src/test/scala/spark/FileServerSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/FileSuite.scala (renamed from core/src/test/scala/spark/FileSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/JavaAPISuite.java (renamed from core/src/test/scala/spark/JavaAPISuite.java) | 20
-rw-r--r--  core/src/test/scala/org/apache/spark/KryoSerializerSuite.scala (renamed from core/src/test/scala/spark/KryoSerializerSuite.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/LocalSparkContext.scala (renamed from core/src/test/scala/spark/LocalSparkContext.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala (renamed from core/src/test/scala/spark/MapOutputTrackerSuite.scala) | 8
-rw-r--r--  core/src/test/scala/org/apache/spark/PairRDDFunctionsSuite.scala (renamed from core/src/test/scala/spark/PairRDDFunctionsSuite.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/PartitionPruningRDDSuite.scala (renamed from core/src/test/scala/spark/PartitionPruningRDDSuite.scala) | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/PartitioningSuite.scala (renamed from core/src/test/scala/spark/PartitioningSuite.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/PipedRDDSuite.scala (renamed from core/src/test/scala/spark/PipedRDDSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/RDDSuite.scala (renamed from core/src/test/scala/spark/RDDSuite.scala) | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/SharedSparkContext.scala (renamed from core/src/test/scala/spark/SharedSparkContext.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala (renamed from core/src/test/scala/spark/ShuffleNettySuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/ShuffleSuite.scala (renamed from core/src/test/scala/spark/ShuffleSuite.scala) | 16
-rw-r--r--  core/src/test/scala/org/apache/spark/SizeEstimatorSuite.scala (renamed from core/src/test/scala/spark/SizeEstimatorSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/SortingSuite.scala (renamed from core/src/test/scala/spark/SortingSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala (renamed from core/src/test/scala/spark/SparkContextInfoSuite.scala) | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/ThreadingSuite.scala (renamed from core/src/test/scala/spark/ThreadingSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/UnpersistSuite.scala (renamed from core/src/test/scala/spark/UnpersistSuite.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/UtilsSuite.scala (renamed from core/src/test/scala/spark/UtilsSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala (renamed from core/src/test/scala/spark/ZippedPartitionsSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala (renamed from core/src/test/scala/spark/io/CompressionCodecSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala (renamed from core/src/test/scala/spark/metrics/MetricsConfigSuite.scala) | 14
-rw-r--r--  core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala (renamed from core/src/test/scala/spark/metrics/MetricsSystemSuite.scala) | 5
-rw-r--r--  core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala (renamed from core/src/test/scala/spark/rdd/JdbcRDDSuite.scala) | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala (renamed from core/src/test/scala/spark/rdd/ParallelCollectionSplitSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala (renamed from core/src/test/scala/spark/scheduler/DAGSchedulerSuite.scala) | 28
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala (renamed from core/src/test/scala/spark/scheduler/JobLoggerSuite.scala) | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala (renamed from core/src/test/scala/spark/scheduler/SparkListenerSuite.scala) | 6
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala (renamed from core/src/test/scala/spark/scheduler/TaskContextSuite.scala) | 12
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterSchedulerSuite.scala (renamed from core/src/test/scala/spark/scheduler/cluster/ClusterSchedulerSuite.scala) | 8
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala (renamed from core/src/test/scala/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala) | 10
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/cluster/FakeTask.scala (renamed from core/src/test/scala/spark/scheduler/cluster/FakeTask.scala) | 4
-rw-r--r--  core/src/test/scala/org/apache/spark/scheduler/local/LocalSchedulerSuite.scala (renamed from core/src/test/scala/spark/scheduler/local/LocalSchedulerSuite.scala) | 8
-rw-r--r--  core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala (renamed from core/src/test/scala/spark/storage/BlockManagerSuite.scala) | 33
-rw-r--r--  core/src/test/scala/org/apache/spark/ui/UISuite.scala (renamed from core/src/test/scala/spark/ui/UISuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/util/DistributionSuite.scala (renamed from core/src/test/scala/spark/util/DistributionSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/util/FakeClock.scala (renamed from core/src/test/scala/spark/util/FakeClock.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala (renamed from core/src/test/scala/spark/util/NextIteratorSuite.scala) | 2
-rw-r--r--  core/src/test/scala/org/apache/spark/util/RateLimitedOutputStreamSuite.scala (renamed from core/src/test/scala/spark/util/RateLimitedOutputStreamSuite.scala) | 2
-rwxr-xr-x  docs/_layouts/global.html | 2
-rw-r--r--  examples/pom.xml | 14
-rw-r--r--  examples/src/main/java/org/apache/spark/examples/JavaHdfsLR.java (renamed from examples/src/main/java/spark/examples/JavaHdfsLR.java) | 10
-rw-r--r--  examples/src/main/java/org/apache/spark/examples/JavaKMeans.java (renamed from examples/src/main/java/spark/examples/JavaKMeans.java) | 14
-rw-r--r--  examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java (renamed from examples/src/main/java/spark/examples/JavaLogQuery.java) | 12
-rw-r--r--  examples/src/main/java/org/apache/spark/examples/JavaPageRank.java (renamed from examples/src/main/java/spark/examples/JavaPageRank.java) | 18
-rw-r--r--  examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java (renamed from examples/src/main/java/spark/examples/JavaSparkPi.java) | 10
-rw-r--r--  examples/src/main/java/org/apache/spark/examples/JavaTC.java (renamed from examples/src/main/java/spark/examples/JavaTC.java) | 8
-rw-r--r--  examples/src/main/java/org/apache/spark/examples/JavaWordCount.java (renamed from examples/src/main/java/spark/examples/JavaWordCount.java) | 14
-rw-r--r--  examples/src/main/java/org/apache/spark/mllib/examples/JavaALS.java (renamed from examples/src/main/java/spark/mllib/examples/JavaALS.java) | 14
-rw-r--r--  examples/src/main/java/org/apache/spark/mllib/examples/JavaKMeans.java (renamed from examples/src/main/java/spark/mllib/examples/JavaKMeans.java) | 12
-rw-r--r--  examples/src/main/java/org/apache/spark/mllib/examples/JavaLR.java (renamed from examples/src/main/java/spark/mllib/examples/JavaLR.java) | 14
-rw-r--r--  examples/src/main/java/org/apache/spark/streaming/examples/JavaFlumeEventCount.java (renamed from examples/src/main/java/spark/streaming/examples/JavaFlumeEventCount.java) | 10
-rw-r--r--  examples/src/main/java/org/apache/spark/streaming/examples/JavaNetworkWordCount.java (renamed from examples/src/main/java/spark/streaming/examples/JavaNetworkWordCount.java) | 16
-rw-r--r--  examples/src/main/java/org/apache/spark/streaming/examples/JavaQueueStream.java (renamed from examples/src/main/java/spark/streaming/examples/JavaQueueStream.java) | 16
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala (renamed from examples/src/main/scala/spark/examples/BroadcastTest.scala) | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/CassandraTest.scala (renamed from examples/src/main/scala/spark/examples/CassandraTest.scala) | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/ExceptionHandlingTest.scala (renamed from examples/src/main/scala/spark/examples/ExceptionHandlingTest.scala) | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/GroupByTest.scala (renamed from examples/src/main/scala/spark/examples/GroupByTest.scala) | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/HBaseTest.scala (renamed from examples/src/main/scala/spark/examples/HBaseTest.scala) | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala (renamed from examples/src/main/scala/spark/examples/HdfsTest.scala) | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/LocalALS.scala (renamed from examples/src/main/scala/spark/examples/LocalALS.scala) | 2
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/LocalFileLR.scala (renamed from examples/src/main/scala/spark/examples/LocalFileLR.scala) | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala (renamed from examples/src/main/scala/spark/examples/LocalKMeans.scala) | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/LocalLR.scala (renamed from examples/src/main/scala/spark/examples/LocalLR.scala) | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/LocalPi.scala (renamed from examples/src/main/scala/spark/examples/LocalPi.scala) | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/LogQuery.scala (renamed from examples/src/main/scala/spark/examples/LogQuery.scala) | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala (renamed from examples/src/main/scala/spark/examples/MultiBroadcastTest.scala) | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/SimpleSkewedGroupByTest.scala (renamed from examples/src/main/scala/spark/examples/SimpleSkewedGroupByTest.scala) | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/SkewedGroupByTest.scala (renamed from examples/src/main/scala/spark/examples/SkewedGroupByTest.scala) | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/SparkALS.scala (renamed from examples/src/main/scala/spark/examples/SparkALS.scala) | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/SparkHdfsLR.scala (renamed from examples/src/main/scala/spark/examples/SparkHdfsLR.scala) | 8
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala (renamed from examples/src/main/scala/spark/examples/SparkKMeans.scala) | 8
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/SparkLR.scala (renamed from examples/src/main/scala/spark/examples/SparkLR.scala) | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala (renamed from examples/src/main/scala/spark/examples/SparkPageRank.scala) | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/SparkPi.scala (renamed from examples/src/main/scala/spark/examples/SparkPi.scala) | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/SparkTC.scala (renamed from examples/src/main/scala/spark/examples/SparkTC.scala) | 4
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/bagel/PageRankUtils.scala (renamed from examples/src/main/scala/spark/examples/bagel/PageRankUtils.scala) | 10
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRank.scala (renamed from examples/src/main/scala/spark/examples/bagel/WikipediaPageRank.scala) | 12
-rw-r--r--  examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRankStandalone.scala (renamed from examples/src/main/scala/spark/examples/bagel/WikipediaPageRankStandalone.scala) | 12
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/ActorWordCount.scala (renamed from examples/src/main/scala/spark/streaming/examples/ActorWordCount.scala) | 14
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/FlumeEventCount.scala (renamed from examples/src/main/scala/spark/streaming/examples/FlumeEventCount.scala) | 8
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/HdfsWordCount.scala (renamed from examples/src/main/scala/spark/streaming/examples/HdfsWordCount.scala) | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/KafkaWordCount.scala (renamed from examples/src/main/scala/spark/streaming/examples/KafkaWordCount.scala) | 12
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/NetworkWordCount.scala (renamed from examples/src/main/scala/spark/streaming/examples/NetworkWordCount.scala) | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/QueueStream.scala (renamed from examples/src/main/scala/spark/streaming/examples/QueueStream.scala) | 8
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/RawNetworkGrep.scala (renamed from examples/src/main/scala/spark/streaming/examples/RawNetworkGrep.scala) | 12
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/StatefulNetworkWordCount.scala (renamed from examples/src/main/scala/spark/streaming/examples/StatefulNetworkWordCount.scala) | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/TwitterAlgebirdCMS.scala (renamed from examples/src/main/scala/spark/streaming/examples/TwitterAlgebirdCMS.scala) | 10
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/TwitterAlgebirdHLL.scala (renamed from examples/src/main/scala/spark/streaming/examples/TwitterAlgebirdHLL.scala) | 8
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/TwitterPopularTags.scala (renamed from examples/src/main/scala/spark/streaming/examples/TwitterPopularTags.scala) | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/ZeroMQWordCount.scala (renamed from examples/src/main/scala/spark/streaming/examples/ZeroMQWordCount.scala) | 6
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala (renamed from examples/src/main/scala/spark/streaming/examples/clickstream/PageViewGenerator.scala) | 2
-rw-r--r--  examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewStream.scala (renamed from examples/src/main/scala/spark/streaming/examples/clickstream/PageViewStream.scala) | 8
-rw-r--r--  mllib/pom.xml | 6
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala (renamed from mllib/src/main/scala/spark/mllib/classification/ClassificationModel.scala) | 4
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala (renamed from mllib/src/main/scala/spark/mllib/classification/LogisticRegression.scala) | 12
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/classification/SVM.scala (renamed from mllib/src/main/scala/spark/mllib/classification/SVM.scala) | 12
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeans.scala (renamed from mllib/src/main/scala/spark/mllib/clustering/KMeans.scala) | 10
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala (renamed from mllib/src/main/scala/spark/mllib/clustering/KMeansModel.scala) | 8
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/clustering/LocalKMeans.scala (renamed from mllib/src/main/scala/spark/mllib/clustering/LocalKMeans.scala) | 2
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/optimization/Gradient.scala (renamed from mllib/src/main/scala/spark/mllib/optimization/Gradient.scala) | 2
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala (renamed from mllib/src/main/scala/spark/mllib/optimization/GradientDescent.scala) | 6
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/optimization/Optimizer.scala (renamed from mllib/src/main/scala/spark/mllib/optimization/Optimizer.scala) | 4
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/optimization/Updater.scala (renamed from mllib/src/main/scala/spark/mllib/optimization/Updater.scala) | 2
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/recommendation/ALS.scala (renamed from mllib/src/main/scala/spark/mllib/recommendation/ALS.scala) | 12
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/recommendation/MatrixFactorizationModel.scala (renamed from mllib/src/main/scala/spark/mllib/recommendation/MatrixFactorizationModel.scala) | 6
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/regression/GeneralizedLinearAlgorithm.scala (renamed from mllib/src/main/scala/spark/mllib/regression/GeneralizedLinearAlgorithm.scala) | 8
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/regression/LabeledPoint.scala (renamed from mllib/src/main/scala/spark/mllib/regression/LabeledPoint.scala) | 2
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/regression/Lasso.scala (renamed from mllib/src/main/scala/spark/mllib/regression/Lasso.scala) | 8
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/regression/LinearRegression.scala (renamed from mllib/src/main/scala/spark/mllib/regression/LinearRegression.scala) | 8
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/regression/RegressionModel.scala (renamed from mllib/src/main/scala/spark/mllib/regression/RegressionModel.scala) | 4
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/regression/RidgeRegression.scala (renamed from mllib/src/main/scala/spark/mllib/regression/RidgeRegression.scala) | 8
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/util/DataValidators.scala (renamed from mllib/src/main/scala/spark/mllib/util/DataValidators.scala) | 6
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/util/KMeansDataGenerator.scala (renamed from mllib/src/main/scala/spark/mllib/util/KMeansDataGenerator.scala) | 4
-rw-r--r--  mllib/src/main/scala/org/apache/spark/mllib/util/LinearDataGenerator.scala (renamed from mllib/src/main/scala/spark/mllib/util/LinearDataGenerator.scala) | 8
-rw-r--r--mllib/src/main/scala/org/apache/spark/mllib/util/LogisticRegressionDataGenerator.scala (renamed from mllib/src/main/scala/spark/mllib/util/LogisticRegressionDataGenerator.scala)6
-rw-r--r--mllib/src/main/scala/org/apache/spark/mllib/util/MFDataGenerator.scala (renamed from mllib/src/main/scala/spark/mllib/util/MFDataGenerator.scala)8
-rw-r--r--mllib/src/main/scala/org/apache/spark/mllib/util/MLUtils.scala (renamed from mllib/src/main/scala/spark/mllib/util/MLUtils.scala)8
-rw-r--r--mllib/src/main/scala/org/apache/spark/mllib/util/SVMDataGenerator.scala (renamed from mllib/src/main/scala/spark/mllib/util/SVMDataGenerator.scala)6
-rw-r--r--mllib/src/test/java/org/apache/spark/mllib/classification/JavaLogisticRegressionSuite.java (renamed from mllib/src/test/java/spark/mllib/classification/JavaLogisticRegressionSuite.java)8
-rw-r--r--mllib/src/test/java/org/apache/spark/mllib/classification/JavaSVMSuite.java (renamed from mllib/src/test/java/spark/mllib/classification/JavaSVMSuite.java)8
-rw-r--r--mllib/src/test/java/org/apache/spark/mllib/clustering/JavaKMeansSuite.java (renamed from mllib/src/test/java/spark/mllib/clustering/JavaKMeansSuite.java)6
-rw-r--r--mllib/src/test/java/org/apache/spark/mllib/recommendation/JavaALSSuite.java (renamed from mllib/src/test/java/spark/mllib/recommendation/JavaALSSuite.java)6
-rw-r--r--mllib/src/test/java/org/apache/spark/mllib/regression/JavaLassoSuite.java (renamed from mllib/src/test/java/spark/mllib/regression/JavaLassoSuite.java)8
-rw-r--r--mllib/src/test/java/org/apache/spark/mllib/regression/JavaLinearRegressionSuite.java (renamed from mllib/src/test/java/spark/mllib/regression/JavaLinearRegressionSuite.java)8
-rw-r--r--mllib/src/test/java/org/apache/spark/mllib/regression/JavaRidgeRegressionSuite.java (renamed from mllib/src/test/java/spark/mllib/regression/JavaRidgeRegressionSuite.java)8
-rw-r--r--mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala (renamed from mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala)6
-rw-r--r--mllib/src/test/scala/org/apache/spark/mllib/classification/SVMSuite.scala (renamed from mllib/src/test/scala/spark/mllib/classification/SVMSuite.scala)10
-rw-r--r--mllib/src/test/scala/org/apache/spark/mllib/clustering/KMeansSuite.scala (renamed from mllib/src/test/scala/spark/mllib/clustering/KMeansSuite.scala)6
-rw-r--r--mllib/src/test/scala/org/apache/spark/mllib/recommendation/ALSSuite.scala (renamed from mllib/src/test/scala/spark/mllib/recommendation/ALSSuite.scala)6
-rw-r--r--mllib/src/test/scala/org/apache/spark/mllib/regression/LassoSuite.scala (renamed from mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala)6
-rw-r--r--mllib/src/test/scala/org/apache/spark/mllib/regression/LinearRegressionSuite.scala (renamed from mllib/src/test/scala/spark/mllib/regression/LinearRegressionSuite.scala)8
-rw-r--r--mllib/src/test/scala/org/apache/spark/mllib/regression/RidgeRegressionSuite.scala (renamed from mllib/src/test/scala/spark/mllib/regression/RidgeRegressionSuite.scala)8
-rw-r--r--pom.xml14
-rw-r--r--project/SparkBuild.scala12
-rw-r--r--python/pyspark/context.py4
-rw-r--r--python/pyspark/files.py2
-rw-r--r--python/pyspark/java_gateway.py4
-rw-r--r--repl-bin/pom.xml12
-rw-r--r--repl/pom.xml12
-rw-r--r--repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala (renamed from repl/src/main/scala/spark/repl/ExecutorClassLoader.scala)2
-rw-r--r--repl/src/main/scala/org/apache/spark/repl/Main.scala (renamed from repl/src/main/scala/spark/repl/Main.scala)2
-rw-r--r--repl/src/main/scala/org/apache/spark/repl/SparkHelper.scala (renamed from repl/src/main/scala/spark/repl/SparkHelper.scala)0
-rw-r--r--repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala (renamed from repl/src/main/scala/spark/repl/SparkILoop.scala)18
-rw-r--r--repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala (renamed from repl/src/main/scala/spark/repl/SparkIMain.scala)10
-rw-r--r--repl/src/main/scala/org/apache/spark/repl/SparkISettings.scala (renamed from repl/src/main/scala/spark/repl/SparkISettings.scala)2
-rw-r--r--repl/src/main/scala/org/apache/spark/repl/SparkImports.scala (renamed from repl/src/main/scala/spark/repl/SparkImports.scala)2
-rw-r--r--repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala (renamed from repl/src/main/scala/spark/repl/SparkJLineCompletion.scala)2
-rw-r--r--repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala (renamed from repl/src/main/scala/spark/repl/SparkJLineReader.scala)2
-rw-r--r--repl/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala (renamed from repl/src/main/scala/spark/repl/SparkMemberHandlers.scala)2
-rw-r--r--repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala (renamed from repl/src/test/scala/spark/repl/ReplSuite.scala)6
-rwxr-xr-xspark-executor2
-rwxr-xr-xspark-shell2
-rw-r--r--spark-shell.cmd2
-rw-r--r--streaming/pom.xml6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala (renamed from streaming/src/main/scala/spark/streaming/Checkpoint.scala)6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/DStream.scala (renamed from streaming/src/main/scala/spark/streaming/DStream.scala)16
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/DStreamCheckpointData.scala (renamed from streaming/src/main/scala/spark/streaming/DStreamCheckpointData.scala)4
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/DStreamGraph.scala (renamed from streaming/src/main/scala/spark/streaming/DStreamGraph.scala)4
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/Duration.scala (renamed from streaming/src/main/scala/spark/streaming/Duration.scala)10
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/Interval.scala (renamed from streaming/src/main/scala/spark/streaming/Interval.scala)2
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/Job.scala (renamed from streaming/src/main/scala/spark/streaming/Job.scala)2
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/JobManager.scala (renamed from streaming/src/main/scala/spark/streaming/JobManager.scala)6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/NetworkInputTracker.scala (renamed from streaming/src/main/scala/spark/streaming/NetworkInputTracker.scala)12
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/PairDStreamFunctions.scala (renamed from streaming/src/main/scala/spark/streaming/PairDStreamFunctions.scala)26
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/Scheduler.scala (renamed from streaming/src/main/scala/spark/streaming/Scheduler.scala)9
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala (renamed from streaming/src/main/scala/spark/streaming/StreamingContext.scala)18
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/Time.scala (renamed from streaming/src/main/scala/spark/streaming/Time.scala)2
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStream.scala (renamed from streaming/src/main/scala/spark/streaming/api/java/JavaDStream.scala)16
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala (renamed from streaming/src/main/scala/spark/streaming/api/java/JavaDStreamLike.scala)10
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala (renamed from streaming/src/main/scala/spark/streaming/api/java/JavaPairDStream.scala)24
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala (renamed from streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala)19
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/CoGroupedDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/CoGroupedDStream.scala)8
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/ConstantInputDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/ConstantInputDStream.scala)6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/FileInputDStream.scala)8
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/FilteredDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/FilteredDStream.scala)6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMapValuedDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/FlatMapValuedDStream.scala)8
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMappedDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/FlatMappedDStream.scala)6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/FlumeInputDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/FlumeInputDStream.scala)8
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/ForEachDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/ForEachDStream.scala)6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/GlommedDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/GlommedDStream.scala)6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/InputDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/InputDStream.scala)4
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/KafkaInputDStream.scala)8
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/MapPartitionedDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/MapPartitionedDStream.scala)6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/MapValuedDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/MapValuedDStream.scala)8
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/MappedDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/MappedDStream.scala)6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/NetworkInputDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/NetworkInputDStream.scala)18
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/PluggableInputDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/PluggableInputDStream.scala)4
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/QueueInputDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala)8
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/RawInputDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/RawInputDStream.scala)8
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/ReducedWindowedDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/ReducedWindowedDStream.scala)16
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/ShuffledDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/ShuffledDStream.scala)8
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/SocketInputDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/SocketInputDStream.scala)8
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/StateDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/StateDStream.scala)12
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/TransformedDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/TransformedDStream.scala)6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/TwitterInputDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/TwitterInputDStream.scala)6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/UnionDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/UnionDStream.scala)8
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/dstream/WindowedDStream.scala (renamed from streaming/src/main/scala/spark/streaming/dstream/WindowedDStream.scala)10
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/receivers/ActorReceiver.scala (renamed from streaming/src/main/scala/spark/streaming/receivers/ActorReceiver.scala)6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/receivers/ZeroMQReceiver.scala (renamed from streaming/src/main/scala/spark/streaming/receivers/ZeroMQReceiver.scala)4
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/util/Clock.scala (renamed from streaming/src/main/scala/spark/streaming/util/Clock.scala)2
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/util/MasterFailureTest.scala (renamed from streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala)8
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/util/RawTextHelper.scala (renamed from streaming/src/main/scala/spark/streaming/util/RawTextHelper.scala)6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala (renamed from streaming/src/main/scala/spark/streaming/util/RawTextSender.scala)6
-rw-r--r--streaming/src/main/scala/org/apache/spark/streaming/util/RecurringTimer.scala (renamed from streaming/src/main/scala/spark/streaming/util/RecurringTimer.scala)2
-rw-r--r--streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java (renamed from streaming/src/test/java/spark/streaming/JavaAPISuite.java)32
-rw-r--r--streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala (renamed from streaming/src/test/java/spark/streaming/JavaTestUtils.scala)17
-rw-r--r--streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala (renamed from streaming/src/test/scala/spark/streaming/BasicOperationsSuite.scala)6
-rw-r--r--streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala (renamed from streaming/src/test/scala/spark/streaming/CheckpointSuite.scala)8
-rw-r--r--streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala (renamed from streaming/src/test/scala/spark/streaming/FailureSuite.scala)6
-rw-r--r--streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala (renamed from streaming/src/test/scala/spark/streaming/InputStreamsSuite.scala)12
-rw-r--r--streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala (renamed from streaming/src/test/scala/spark/streaming/TestSuiteBase.scala)8
-rw-r--r--streaming/src/test/scala/org/apache/spark/streaming/WindowOperationsSuite.scala (renamed from streaming/src/test/scala/spark/streaming/WindowOperationsSuite.scala)6
-rw-r--r--tools/pom.xml8
-rw-r--r--tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala (renamed from tools/src/main/scala/spark/tools/JavaAPICompletenessChecker.scala)184
-rw-r--r--yarn/pom.xml6
-rw-r--r--yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala (renamed from yarn/src/main/scala/spark/deploy/yarn/ApplicationMaster.scala)4
-rw-r--r--yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMasterArguments.scala (renamed from yarn/src/main/scala/spark/deploy/yarn/ApplicationMasterArguments.scala)4
-rw-r--r--yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala (renamed from yarn/src/main/scala/spark/deploy/yarn/Client.scala)6
-rw-r--r--yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala (renamed from yarn/src/main/scala/spark/deploy/yarn/ClientArguments.scala)8
-rw-r--r--yarn/src/main/scala/org/apache/spark/deploy/yarn/WorkerRunnable.scala (renamed from yarn/src/main/scala/spark/deploy/yarn/WorkerRunnable.scala)6
-rw-r--r--yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocationHandler.scala (renamed from yarn/src/main/scala/spark/deploy/yarn/YarnAllocationHandler.scala)8
-rw-r--r--yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala (renamed from yarn/src/main/scala/spark/deploy/yarn/YarnSparkHadoopUtil.scala)4
-rw-r--r--yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClusterScheduler.scala (renamed from yarn/src/main/scala/spark/scheduler/cluster/YarnClusterScheduler.scala)6
521 files changed, 1788 insertions, 1777 deletions
diff --git a/README.md b/README.md
index 2ddfe862a2..c4170650f7 100644
--- a/README.md
+++ b/README.md
@@ -27,7 +27,7 @@ Or, for the Python API, the Python shell (`./pyspark`).
Spark also comes with several sample programs in the `examples` directory.
To run one of them, use `./run-example <class> <params>`. For example:
- ./run-example spark.examples.SparkLR local[2]
+ ./run-example org.apache.spark.examples.SparkLR local[2]
will run the Logistic Regression example locally on 2 CPUs.
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 74990b6361..dc63811b76 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -19,13 +19,13 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-assembly</artifactId>
<name>Spark Project Assembly</name>
<url>http://spark-project.org/</url>
@@ -40,27 +40,27 @@
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-bagel</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-mllib</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-repl</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-streaming</artifactId>
<version>${project.version}</version>
</dependency>
@@ -121,7 +121,7 @@
<id>hadoop2-yarn</id>
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-yarn</artifactId>
<version>${project.version}</version>
</dependency>
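The groupId change above also propagates to downstream builds. A minimal sbt sketch of a dependency updated to the new coordinates (artifact name and version are taken from this diff; whether the renamed artifacts are already published under these coordinates is an assumption):

    // build.sbt sketch, assuming the renamed artifacts are published
    libraryDependencies += "org.apache.spark" % "spark-core" % "0.8.0-SNAPSHOT"  // was "org.spark-project" % "spark-core"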
diff --git a/assembly/src/main/assembly/assembly.xml b/assembly/src/main/assembly/assembly.xml
index 4543b52c93..47d3fa93d0 100644
--- a/assembly/src/main/assembly/assembly.xml
+++ b/assembly/src/main/assembly/assembly.xml
@@ -30,9 +30,9 @@
</fileSet>
<fileSet>
<directory>
- ${project.parent.basedir}/core/src/main/resources/spark/ui/static/
+ ${project.parent.basedir}/core/src/main/resources/org/apache/spark/ui/static/
</directory>
- <outputDirectory>/ui-resources/spark/ui/static</outputDirectory>
+ <outputDirectory>/ui-resources/org/apache/spark/ui/static</outputDirectory>
<includes>
<include>**/*</include>
</includes>
@@ -63,10 +63,10 @@
<dependencySets>
<dependencySet>
<includes>
- <include>org.spark-project:*:jar</include>
+ <include>org.apache.spark:*:jar</include>
</includes>
<excludes>
- <exclude>org.spark-project:spark-assembly:jar</exclude>
+ <exclude>org.apache.spark:spark-assembly:jar</exclude>
</excludes>
</dependencySet>
<dependencySet>
@@ -77,7 +77,7 @@
<useProjectArtifact>false</useProjectArtifact>
<excludes>
<exclude>org.apache.hadoop:*:jar</exclude>
- <exclude>org.spark-project:*:jar</exclude>
+ <exclude>org.apache.spark:*:jar</exclude>
</excludes>
</dependencySet>
</dependencySets>
diff --git a/bagel/pom.xml b/bagel/pom.xml
index cbcf8d1239..9340991377 100644
--- a/bagel/pom.xml
+++ b/bagel/pom.xml
@@ -19,13 +19,13 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-bagel</artifactId>
<packaging>jar</packaging>
<name>Spark Project Bagel</name>
@@ -33,7 +33,7 @@
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
</dependency>
diff --git a/bagel/src/main/scala/spark/bagel/Bagel.scala b/bagel/src/main/scala/org/apache/spark/bagel/Bagel.scala
index 80c8d53d2b..fec8737fcd 100644
--- a/bagel/src/main/scala/spark/bagel/Bagel.scala
+++ b/bagel/src/main/scala/org/apache/spark/bagel/Bagel.scala
@@ -15,32 +15,31 @@
* limitations under the License.
*/
-package spark.bagel
+package org.apache.spark.bagel
-import spark._
-import spark.SparkContext._
+import org.apache.spark._
+import org.apache.spark.SparkContext._
-import scala.collection.mutable.ArrayBuffer
-import storage.StorageLevel
+import org.apache.spark.storage.StorageLevel
object Bagel extends Logging {
val DEFAULT_STORAGE_LEVEL = StorageLevel.MEMORY_AND_DISK
/**
* Runs a Bagel program.
- * @param sc [[spark.SparkContext]] to use for the program.
+ * @param sc [[org.apache.spark.SparkContext]] to use for the program.
* @param vertices vertices of the graph represented as an RDD of (Key, Vertex) pairs. Often the Key will be
* the vertex id.
* @param messages initial set of messages represented as an RDD of (Key, Message) pairs. Often this will be an
* empty array, i.e. sc.parallelize(Array[K, Message]()).
- * @param combiner [[spark.bagel.Combiner]] combines multiple individual messages to a given vertex into one
+ * @param combiner [[org.apache.spark.bagel.Combiner]] combines multiple individual messages to a given vertex into one
* message before sending (which often involves network I/O).
- * @param aggregator [[spark.bagel.Aggregator]] performs a reduce across all vertices after each superstep,
+ * @param aggregator [[org.apache.spark.bagel.Aggregator]] performs a reduce across all vertices after each superstep,
* and provides the result to each vertex in the next superstep.
- * @param partitioner [[spark.Partitioner]] partitions values by key
+ * @param partitioner [[org.apache.spark.Partitioner]] partitions values by key
* @param numPartitions number of partitions across which to split the graph.
* Default is the default parallelism of the SparkContext
- * @param storageLevel [[spark.storage.StorageLevel]] to use for caching of intermediate RDDs in each superstep.
+ * @param storageLevel [[org.apache.spark.storage.StorageLevel]] to use for caching of intermediate RDDs in each superstep.
* Defaults to caching in memory.
* @param compute function that takes a Vertex, optional set of (possibly combined) messages to the Vertex,
* optional Aggregator and the current superstep,
@@ -98,7 +97,7 @@ object Bagel extends Logging {
verts
}
- /** Runs a Bagel program with no [[spark.bagel.Aggregator]] and the default storage level */
+ /** Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]] and the default storage level */
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
sc: SparkContext,
vertices: RDD[(K, V)],
@@ -110,7 +109,7 @@ object Bagel extends Logging {
compute: (V, Option[C], Int) => (V, Array[M])
): RDD[(K, V)] = run(sc, vertices, messages, combiner, numPartitions, DEFAULT_STORAGE_LEVEL)(compute)
- /** Runs a Bagel program with no [[spark.bagel.Aggregator]] */
+ /** Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]] */
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
sc: SparkContext,
vertices: RDD[(K, V)],
@@ -128,7 +127,7 @@ object Bagel extends Logging {
}
/**
- * Runs a Bagel program with no [[spark.bagel.Aggregator]], default [[spark.HashPartitioner]]
+ * Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]], default [[org.apache.spark.HashPartitioner]]
* and default storage level
*/
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
@@ -141,7 +140,7 @@ object Bagel extends Logging {
compute: (V, Option[C], Int) => (V, Array[M])
): RDD[(K, V)] = run(sc, vertices, messages, combiner, numPartitions, DEFAULT_STORAGE_LEVEL)(compute)
- /** Runs a Bagel program with no [[spark.bagel.Aggregator]] and the default [[spark.HashPartitioner]]*/
+ /** Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]] and the default [[org.apache.spark.HashPartitioner]]*/
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest, C: Manifest](
sc: SparkContext,
vertices: RDD[(K, V)],
@@ -159,8 +158,8 @@ object Bagel extends Logging {
}
/**
- * Runs a Bagel program with no [[spark.bagel.Aggregator]], default [[spark.HashPartitioner]],
- * [[spark.bagel.DefaultCombiner]] and the default storage level
+ * Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]], default [[org.apache.spark.HashPartitioner]],
+ * [[org.apache.spark.bagel.DefaultCombiner]] and the default storage level
*/
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest](
sc: SparkContext,
@@ -172,8 +171,8 @@ object Bagel extends Logging {
): RDD[(K, V)] = run(sc, vertices, messages, numPartitions, DEFAULT_STORAGE_LEVEL)(compute)
/**
- * Runs a Bagel program with no [[spark.bagel.Aggregator]], the default [[spark.HashPartitioner]]
- * and [[spark.bagel.DefaultCombiner]]
+ * Runs a Bagel program with no [[org.apache.spark.bagel.Aggregator]], the default [[org.apache.spark.HashPartitioner]]
+ * and [[org.apache.spark.bagel.DefaultCombiner]]
*/
def run[K: Manifest, V <: Vertex : Manifest, M <: Message[K] : Manifest](
sc: SparkContext,
diff --git a/bagel/src/test/scala/bagel/BagelSuite.scala b/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
index ef2d57fbd0..7b954a4775 100644
--- a/bagel/src/test/scala/bagel/BagelSuite.scala
+++ b/bagel/src/test/scala/org/apache/spark/bagel/BagelSuite.scala
@@ -15,16 +15,14 @@
* limitations under the License.
*/
-package spark.bagel
+package org.apache.spark.bagel
-import org.scalatest.{FunSuite, Assertions, BeforeAndAfter}
+import org.scalatest.{BeforeAndAfter, FunSuite, Assertions}
import org.scalatest.concurrent.Timeouts
import org.scalatest.time.SpanSugar._
-import scala.collection.mutable.ArrayBuffer
-
-import spark._
-import storage.StorageLevel
+import org.apache.spark._
+import org.apache.spark.storage.StorageLevel
class TestVertex(val active: Boolean, val age: Int) extends Vertex with Serializable
class TestMessage(val targetId: String) extends Message[String] with Serializable
diff --git a/bin/start-master.sh b/bin/start-master.sh
index 2288fb19d7..648c7ae75f 100755
--- a/bin/start-master.sh
+++ b/bin/start-master.sh
@@ -49,4 +49,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
fi
fi
-"$bin"/spark-daemon.sh start spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
+"$bin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
diff --git a/bin/start-slave.sh b/bin/start-slave.sh
index d6db16882d..4eefa20944 100755
--- a/bin/start-slave.sh
+++ b/bin/start-slave.sh
@@ -32,4 +32,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
fi
fi
-"$bin"/spark-daemon.sh start spark.deploy.worker.Worker "$@"
+"$bin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"
diff --git a/bin/stop-master.sh b/bin/stop-master.sh
index 31a610bf9d..310e33bedc 100755
--- a/bin/stop-master.sh
+++ b/bin/stop-master.sh
@@ -24,4 +24,4 @@ bin=`cd "$bin"; pwd`
. "$bin/spark-config.sh"
-"$bin"/spark-daemon.sh stop spark.deploy.master.Master 1
+"$bin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
diff --git a/bin/stop-slaves.sh b/bin/stop-slaves.sh
index 8e056f23d4..03e416a132 100755
--- a/bin/stop-slaves.sh
+++ b/bin/stop-slaves.sh
@@ -29,9 +29,9 @@ if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
fi
if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
- "$bin"/spark-daemons.sh stop spark.deploy.worker.Worker 1
+ "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1
else
for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
- "$bin"/spark-daemons.sh stop spark.deploy.worker.Worker $(( $i + 1 ))
+ "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
done
fi
diff --git a/core/pom.xml b/core/pom.xml
index 53696367e9..c803217f96 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -19,13 +19,13 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-core</artifactId>
<packaging>jar</packaging>
<name>Spark Project Core</name>
diff --git a/core/src/main/java/spark/network/netty/FileClient.java b/core/src/main/java/org/apache/spark/network/netty/FileClient.java
index 0625a6d502..20a7a3aa8c 100644
--- a/core/src/main/java/spark/network/netty/FileClient.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileClient.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.network.netty;
+package org.apache.spark.network.netty;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
diff --git a/core/src/main/java/spark/network/netty/FileClientChannelInitializer.java b/core/src/main/java/org/apache/spark/network/netty/FileClientChannelInitializer.java
index 05ad4b61d7..65ee15d63b 100644
--- a/core/src/main/java/spark/network/netty/FileClientChannelInitializer.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileClientChannelInitializer.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.network.netty;
+package org.apache.spark.network.netty;
import io.netty.buffer.BufType;
import io.netty.channel.ChannelInitializer;
diff --git a/core/src/main/java/spark/network/netty/FileClientHandler.java b/core/src/main/java/org/apache/spark/network/netty/FileClientHandler.java
index e8cd9801f6..c4aa2669e0 100644
--- a/core/src/main/java/spark/network/netty/FileClientHandler.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileClientHandler.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.network.netty;
+package org.apache.spark.network.netty;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
diff --git a/core/src/main/java/spark/network/netty/FileServer.java b/core/src/main/java/org/apache/spark/network/netty/FileServer.java
index 9f009a61d5..666432474d 100644
--- a/core/src/main/java/spark/network/netty/FileServer.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileServer.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.network.netty;
+package org.apache.spark.network.netty;
import java.net.InetSocketAddress;
diff --git a/core/src/main/java/spark/network/netty/FileServerChannelInitializer.java b/core/src/main/java/org/apache/spark/network/netty/FileServerChannelInitializer.java
index 50c57a81a3..833af1632d 100644
--- a/core/src/main/java/spark/network/netty/FileServerChannelInitializer.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileServerChannelInitializer.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.network.netty;
+package org.apache.spark.network.netty;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.socket.SocketChannel;
diff --git a/core/src/main/java/spark/network/netty/FileServerHandler.java b/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
index 176ba8da49..d3d57a0255 100644
--- a/core/src/main/java/spark/network/netty/FileServerHandler.java
+++ b/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.network.netty;
+package org.apache.spark.network.netty;
import java.io.File;
import java.io.FileInputStream;
diff --git a/core/src/main/java/spark/network/netty/PathResolver.java b/core/src/main/java/org/apache/spark/network/netty/PathResolver.java
index f446c55b19..94c034cad0 100755
--- a/core/src/main/java/spark/network/netty/PathResolver.java
+++ b/core/src/main/java/org/apache/spark/network/netty/PathResolver.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.network.netty;
+package org.apache.spark.network.netty;
public interface PathResolver {
diff --git a/core/src/main/resources/spark/ui/static/bootstrap.min.css b/core/src/main/resources/org/apache/spark/ui/static/bootstrap.min.css
index 13cef3d6f1..13cef3d6f1 100755
--- a/core/src/main/resources/spark/ui/static/bootstrap.min.css
+++ b/core/src/main/resources/org/apache/spark/ui/static/bootstrap.min.css
diff --git a/core/src/main/resources/spark/ui/static/sorttable.js b/core/src/main/resources/org/apache/spark/ui/static/sorttable.js
index 7abb9011cc..7abb9011cc 100644
--- a/core/src/main/resources/spark/ui/static/sorttable.js
+++ b/core/src/main/resources/org/apache/spark/ui/static/sorttable.js
diff --git a/core/src/main/resources/spark/ui/static/spark-logo-77x50px-hd.png b/core/src/main/resources/org/apache/spark/ui/static/spark-logo-77x50px-hd.png
index 6c5f0993c4..6c5f0993c4 100644
--- a/core/src/main/resources/spark/ui/static/spark-logo-77x50px-hd.png
+++ b/core/src/main/resources/org/apache/spark/ui/static/spark-logo-77x50px-hd.png
Binary files differ
diff --git a/core/src/main/resources/spark/ui/static/spark_logo.png b/core/src/main/resources/org/apache/spark/ui/static/spark_logo.png
index 4b18734779..4b18734779 100644
--- a/core/src/main/resources/spark/ui/static/spark_logo.png
+++ b/core/src/main/resources/org/apache/spark/ui/static/spark_logo.png
Binary files differ
diff --git a/core/src/main/resources/spark/ui/static/webui.css b/core/src/main/resources/org/apache/spark/ui/static/webui.css
index a600c06c04..a600c06c04 100644
--- a/core/src/main/resources/spark/ui/static/webui.css
+++ b/core/src/main/resources/org/apache/spark/ui/static/webui.css
diff --git a/core/src/main/scala/spark/Accumulators.scala b/core/src/main/scala/org/apache/spark/Accumulators.scala
index 6ff92ce833..5177ee58fa 100644
--- a/core/src/main/scala/spark/Accumulators.scala
+++ b/core/src/main/scala/org/apache/spark/Accumulators.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io._
@@ -28,7 +28,7 @@ import scala.collection.generic.Growable
*
* You must define how to add data, and how to merge two of these together. For some datatypes,
* such as a counter, these might be the same operation. In that case, you can use the simpler
- * [[spark.Accumulator]]. They won't always be the same, though -- e.g., imagine you are
+ * [[org.apache.spark.Accumulator]]. They won't always be the same, though -- e.g., imagine you are
* accumulating a set. You will add items to the set, and you will union two sets together.
*
* @param initialValue initial value of accumulator
@@ -176,7 +176,7 @@ class GrowableAccumulableParam[R <% Growable[T] with TraversableOnce[T] with Ser
def zero(initialValue: R): R = {
// We need to clone initialValue, but it's hard to specify that R should also be Cloneable.
// Instead we'll serialize it to a buffer and load it back.
- val ser = (new spark.JavaSerializer).newInstance()
+ val ser = new JavaSerializer().newInstance()
val copy = ser.deserialize[R](ser.serialize(initialValue))
copy.clear() // In case it contained stuff
copy
@@ -184,7 +184,7 @@ class GrowableAccumulableParam[R <% Growable[T] with TraversableOnce[T] with Ser
}
/**
- * A simpler value of [[spark.Accumulable]] where the result type being accumulated is the same
+ * A simpler value of [[org.apache.spark.Accumulable]] where the result type being accumulated is the same
* as the types of elements being merged.
*
* @param initialValue initial value of accumulator
@@ -195,7 +195,7 @@ class Accumulator[T](@transient initialValue: T, param: AccumulatorParam[T])
extends Accumulable[T,T](initialValue, param)
/**
- * A simpler version of [[spark.AccumulableParam]] where the only datatype you can add in is the same type
+ * A simpler version of [[org.apache.spark.AccumulableParam]] where the only datatype you can add in is the same type
* as the accumulated value. An implicit AccumulatorParam object needs to be available when you create
* Accumulators of a specific type.
*
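The scaladoc rewritten above distinguishes Accumulable from the simpler Accumulator, where add and merge are the same operation. A short sketch of the common counter case, assuming the usual SparkContext.accumulator factory (which is not itself shown in this diff):

    import org.apache.spark.SparkContext

    object AccumulatorSketch {
      def main(args: Array[String]) {
        val sc = new SparkContext("local[2]", "AccumulatorSketch")  // hypothetical app name
        val sum = sc.accumulator(0)                    // Accumulator[Int]: add and merge coincide
        sc.parallelize(1 to 100).foreach(x => sum += x)
        println(sum.value)                             // 5050, read back on the driver
        sc.stop()
      }
    }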
diff --git a/core/src/main/scala/spark/Aggregator.scala b/core/src/main/scala/org/apache/spark/Aggregator.scala
index 9af401986d..3ef402926e 100644
--- a/core/src/main/scala/spark/Aggregator.scala
+++ b/core/src/main/scala/org/apache/spark/Aggregator.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.util.{HashMap => JHashMap}
diff --git a/core/src/main/scala/spark/BlockStoreShuffleFetcher.scala b/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
index 1ec95ed9b8..908ff56a6b 100644
--- a/core/src/main/scala/spark/BlockStoreShuffleFetcher.scala
+++ b/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
-import spark.executor.{ShuffleReadMetrics, TaskMetrics}
-import spark.serializer.Serializer
-import spark.storage.BlockManagerId
-import spark.util.CompletionIterator
+import org.apache.spark.executor.{ShuffleReadMetrics, TaskMetrics}
+import org.apache.spark.serializer.Serializer
+import org.apache.spark.storage.BlockManagerId
+import org.apache.spark.util.CompletionIterator
private[spark] class BlockStoreShuffleFetcher extends ShuffleFetcher with Logging {
diff --git a/core/src/main/scala/spark/CacheManager.scala b/core/src/main/scala/org/apache/spark/CacheManager.scala
index 81314805a9..42e465b9d8 100644
--- a/core/src/main/scala/spark/CacheManager.scala
+++ b/core/src/main/scala/org/apache/spark/CacheManager.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import scala.collection.mutable.{ArrayBuffer, HashSet}
-import spark.storage.{BlockManager, StorageLevel}
+import org.apache.spark.storage.{BlockManager, StorageLevel}
/** Spark class responsible for passing RDDs split contents to the BlockManager and making
diff --git a/core/src/main/scala/spark/ClosureCleaner.scala b/core/src/main/scala/org/apache/spark/ClosureCleaner.scala
index 8b39241095..71d9e62d4f 100644
--- a/core/src/main/scala/spark/ClosureCleaner.scala
+++ b/core/src/main/scala/org/apache/spark/ClosureCleaner.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.lang.reflect.Field
diff --git a/core/src/main/scala/spark/Dependency.scala b/core/src/main/scala/org/apache/spark/Dependency.scala
index d5a9606570..cc3c2474a6 100644
--- a/core/src/main/scala/spark/Dependency.scala
+++ b/core/src/main/scala/org/apache/spark/Dependency.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
/**
* Base class for dependencies.
diff --git a/core/src/main/scala/spark/DoubleRDDFunctions.scala b/core/src/main/scala/org/apache/spark/DoubleRDDFunctions.scala
index 104168e61c..dd344491b8 100644
--- a/core/src/main/scala/spark/DoubleRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/DoubleRDDFunctions.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
-import spark.partial.BoundedDouble
-import spark.partial.MeanEvaluator
-import spark.partial.PartialResult
-import spark.partial.SumEvaluator
-import spark.util.StatCounter
+import org.apache.spark.partial.BoundedDouble
+import org.apache.spark.partial.MeanEvaluator
+import org.apache.spark.partial.PartialResult
+import org.apache.spark.partial.SumEvaluator
+import org.apache.spark.util.StatCounter
/**
* Extra functions available on RDDs of Doubles through an implicit conversion.
@@ -34,7 +34,7 @@ class DoubleRDDFunctions(self: RDD[Double]) extends Logging with Serializable {
}
/**
- * Return a [[spark.util.StatCounter]] object that captures the mean, variance and count
+ * Return a [[org.apache.spark.util.StatCounter]] object that captures the mean, variance and count
* of the RDD's elements in one operation.
*/
def stats(): StatCounter = {
diff --git a/core/src/main/scala/spark/FetchFailedException.scala b/core/src/main/scala/org/apache/spark/FetchFailedException.scala
index a2dae6cae9..d242047502 100644
--- a/core/src/main/scala/spark/FetchFailedException.scala
+++ b/core/src/main/scala/org/apache/spark/FetchFailedException.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
-import spark.storage.BlockManagerId
+import org.apache.spark.storage.BlockManagerId
private[spark] class FetchFailedException(
taskEndReason: TaskEndReason,
diff --git a/core/src/main/scala/spark/HttpFileServer.scala b/core/src/main/scala/org/apache/spark/HttpFileServer.scala
index a13a7a2859..9b3a896648 100644
--- a/core/src/main/scala/spark/HttpFileServer.scala
+++ b/core/src/main/scala/org/apache/spark/HttpFileServer.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io.{File}
import com.google.common.io.Files
diff --git a/core/src/main/scala/spark/HttpServer.scala b/core/src/main/scala/org/apache/spark/HttpServer.scala
index c9dffbc631..db36c7c9dd 100644
--- a/core/src/main/scala/spark/HttpServer.scala
+++ b/core/src/main/scala/org/apache/spark/HttpServer.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io.File
import java.net.InetAddress
diff --git a/core/src/main/scala/spark/JavaSerializer.scala b/core/src/main/scala/org/apache/spark/JavaSerializer.scala
index 04c5f44e6b..f43396cb6b 100644
--- a/core/src/main/scala/spark/JavaSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/JavaSerializer.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io._
import java.nio.ByteBuffer
import serializer.{Serializer, SerializerInstance, DeserializationStream, SerializationStream}
-import spark.util.ByteBufferInputStream
+import org.apache.spark.util.ByteBufferInputStream
private[spark] class JavaSerializationStream(out: OutputStream) extends SerializationStream {
val objOut = new ObjectOutputStream(out)
diff --git a/core/src/main/scala/spark/KryoSerializer.scala b/core/src/main/scala/org/apache/spark/KryoSerializer.scala
index eeb2993d8a..db86e6db43 100644
--- a/core/src/main/scala/spark/KryoSerializer.scala
+++ b/core/src/main/scala/org/apache/spark/KryoSerializer.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io._
import java.nio.ByteBuffer
@@ -24,8 +24,8 @@ import com.esotericsoftware.kryo.io.{Input => KryoInput, Output => KryoOutput}
import com.esotericsoftware.kryo.serializers.{JavaSerializer => KryoJavaSerializer}
import com.twitter.chill.ScalaKryoInstantiator
import serializer.{SerializerInstance, DeserializationStream, SerializationStream}
-import spark.broadcast._
-import spark.storage._
+import org.apache.spark.broadcast._
+import org.apache.spark.storage._
private[spark]
class KryoSerializationStream(kryo: Kryo, outStream: OutputStream) extends SerializationStream {
@@ -104,7 +104,7 @@ trait KryoRegistrator {
/**
* A Spark serializer that uses the [[http://code.google.com/p/kryo/wiki/V1Documentation Kryo 1.x library]].
*/
-class KryoSerializer extends spark.serializer.Serializer with Logging {
+class KryoSerializer extends org.apache.spark.serializer.Serializer with Logging {
private val bufferSize = System.getProperty("spark.kryoserializer.buffer.mb", "2").toInt * 1024 * 1024
def newKryoOutput() = new KryoOutput(bufferSize)
@@ -153,4 +153,4 @@ class KryoSerializer extends spark.serializer.Serializer with Logging {
def newInstance(): SerializerInstance = {
new KryoSerializerInstance(this)
}
-}
\ No newline at end of file
+}
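KryoSerializer reads its buffer size from a system property, as the hunk above shows. A sketch of enabling it in a driver under the new class name (spark.serializer and its value are assumptions about this era's configuration; only spark.kryoserializer.buffer.mb appears in the diff):

    // Set before the SparkContext is created; the class name reflects the new package.
    System.setProperty("spark.serializer", "org.apache.spark.KryoSerializer")  // assumed property name
    System.setProperty("spark.kryoserializer.buffer.mb", "8")                  // default is 2 (see diff)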
diff --git a/core/src/main/scala/spark/Logging.scala b/core/src/main/scala/org/apache/spark/Logging.scala
index 79b0362830..6a973ea495 100644
--- a/core/src/main/scala/spark/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/Logging.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.slf4j.Logger
import org.slf4j.LoggerFactory
diff --git a/core/src/main/scala/spark/MapOutputTracker.scala b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
index 0cd0341a72..0f422d910a 100644
--- a/core/src/main/scala/spark/MapOutputTracker.scala
+++ b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io._
import java.util.zip.{GZIPInputStream, GZIPOutputStream}
@@ -30,9 +30,9 @@ import akka.remote._
import akka.util.Duration
-import spark.scheduler.MapStatus
-import spark.storage.BlockManagerId
-import spark.util.{MetadataCleaner, TimeStampedHashMap}
+import org.apache.spark.scheduler.MapStatus
+import org.apache.spark.storage.BlockManagerId
+import org.apache.spark.util.{MetadataCleaner, TimeStampedHashMap}
private[spark] sealed trait MapOutputTrackerMessage
diff --git a/core/src/main/scala/spark/PairRDDFunctions.scala b/core/src/main/scala/org/apache/spark/PairRDDFunctions.scala
index cc1285dd95..d046e7c1a4 100644
--- a/core/src/main/scala/spark/PairRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/PairRDDFunctions.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.nio.ByteBuffer
import java.util.{Date, HashMap => JHashMap}
@@ -40,11 +40,11 @@ import org.apache.hadoop.mapreduce.{OutputFormat => NewOutputFormat,
RecordWriter => NewRecordWriter, Job => NewAPIHadoopJob, SparkHadoopMapReduceUtil}
import org.apache.hadoop.security.UserGroupInformation
-import spark.partial.BoundedDouble
-import spark.partial.PartialResult
-import spark.rdd._
-import spark.SparkContext._
-import spark.Partitioner._
+import org.apache.spark.partial.BoundedDouble
+import org.apache.spark.partial.PartialResult
+import org.apache.spark.rdd._
+import org.apache.spark.SparkContext._
+import org.apache.spark.Partitioner._
/**
* Extra functions available on RDDs of (key, value) pairs through an implicit conversion.
@@ -559,7 +559,7 @@ class PairRDDFunctions[K: ClassManifest, V: ClassManifest](self: RDD[(K, V)])
val formatter = new SimpleDateFormat("yyyyMMddHHmm")
val jobtrackerID = formatter.format(new Date())
val stageId = self.id
- def writeShard(context: spark.TaskContext, iter: Iterator[(K,V)]): Int = {
+ def writeShard(context: TaskContext, iter: Iterator[(K,V)]): Int = {
// Hadoop wants a 32-bit task attempt ID, so if ours is bigger than Int.MaxValue, roll it
// around by taking a mod. We expect that no task will be attempted 2 billion times.
val attemptNumber = (context.attemptId % Int.MaxValue).toInt
@@ -571,7 +571,7 @@ class PairRDDFunctions[K: ClassManifest, V: ClassManifest](self: RDD[(K, V)])
committer.setupTask(hadoopContext)
val writer = format.getRecordWriter(hadoopContext).asInstanceOf[NewRecordWriter[K,V]]
while (iter.hasNext) {
- val (k, v) = iter.next
+ val (k, v) = iter.next()
writer.write(k, v)
}
writer.close(hadoopContext)
diff --git a/core/src/main/scala/spark/Partition.scala b/core/src/main/scala/org/apache/spark/Partition.scala
index 2a4edcec98..87914a061f 100644
--- a/core/src/main/scala/spark/Partition.scala
+++ b/core/src/main/scala/org/apache/spark/Partition.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
/**
* A partition of an RDD.
diff --git a/core/src/main/scala/spark/Partitioner.scala b/core/src/main/scala/org/apache/spark/Partitioner.scala
index 65da8235d7..4dce2607b0 100644
--- a/core/src/main/scala/spark/Partitioner.scala
+++ b/core/src/main/scala/org/apache/spark/Partitioner.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
/**
* An object that defines how the elements in a key-value pair RDD are partitioned by key.
@@ -56,7 +56,7 @@ object Partitioner {
}
/**
- * A [[spark.Partitioner]] that implements hash-based partitioning using Java's `Object.hashCode`.
+ * A [[org.apache.spark.Partitioner]] that implements hash-based partitioning using Java's `Object.hashCode`.
*
* Java arrays have hashCodes that are based on the arrays' identities rather than their contents,
* so attempting to partition an RDD[Array[_]] or RDD[(Array[_], _)] using a HashPartitioner will
@@ -79,7 +79,7 @@ class HashPartitioner(partitions: Int) extends Partitioner {
}
/**
- * A [[spark.Partitioner]] that partitions sortable records by range into roughly equal ranges.
+ * A [[org.apache.spark.Partitioner]] that partitions sortable records by range into roughly equal ranges.
* Determines the ranges by sampling the RDD passed in.
*/
class RangePartitioner[K <% Ordered[K]: ClassManifest, V](
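A spark-shell sketch of hash partitioning under the new package, using the HashPartitioner(partitions: Int) constructor visible in the hunk above; partitionBy is assumed available through the SparkContext._ implicits, and sc is the shell's predefined context:

    import org.apache.spark.HashPartitioner
    import org.apache.spark.SparkContext._   // brings the pair-RDD implicits into scope

    val pairs = sc.parallelize(Seq(("a", 1), ("b", 2), ("a", 3)))
    val byKey = pairs.partitionBy(new HashPartitioner(4))  // four hash partitions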
diff --git a/core/src/main/scala/spark/RDD.scala b/core/src/main/scala/org/apache/spark/RDD.scala
index 25a6951732..0d1f07f76c 100644
--- a/core/src/main/scala/spark/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/RDD.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.util.Random
@@ -31,30 +31,30 @@ import org.apache.hadoop.mapred.TextOutputFormat
import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap}
-import spark.Partitioner._
-import spark.api.java.JavaRDD
-import spark.partial.BoundedDouble
-import spark.partial.CountEvaluator
-import spark.partial.GroupedCountEvaluator
-import spark.partial.PartialResult
-import spark.rdd.CoalescedRDD
-import spark.rdd.CartesianRDD
-import spark.rdd.FilteredRDD
-import spark.rdd.FlatMappedRDD
-import spark.rdd.GlommedRDD
-import spark.rdd.MappedRDD
-import spark.rdd.MapPartitionsRDD
-import spark.rdd.MapPartitionsWithIndexRDD
-import spark.rdd.PipedRDD
-import spark.rdd.SampledRDD
-import spark.rdd.ShuffledRDD
-import spark.rdd.UnionRDD
-import spark.rdd.ZippedRDD
-import spark.rdd.ZippedPartitionsRDD2
-import spark.rdd.ZippedPartitionsRDD3
-import spark.rdd.ZippedPartitionsRDD4
-import spark.storage.StorageLevel
-import spark.util.BoundedPriorityQueue
+import org.apache.spark.Partitioner._
+import org.apache.spark.api.java.JavaRDD
+import org.apache.spark.partial.BoundedDouble
+import org.apache.spark.partial.CountEvaluator
+import org.apache.spark.partial.GroupedCountEvaluator
+import org.apache.spark.partial.PartialResult
+import org.apache.spark.rdd.CoalescedRDD
+import org.apache.spark.rdd.CartesianRDD
+import org.apache.spark.rdd.FilteredRDD
+import org.apache.spark.rdd.FlatMappedRDD
+import org.apache.spark.rdd.GlommedRDD
+import org.apache.spark.rdd.MappedRDD
+import org.apache.spark.rdd.MapPartitionsRDD
+import org.apache.spark.rdd.MapPartitionsWithIndexRDD
+import org.apache.spark.rdd.PipedRDD
+import org.apache.spark.rdd.SampledRDD
+import org.apache.spark.rdd.ShuffledRDD
+import org.apache.spark.rdd.UnionRDD
+import org.apache.spark.rdd.ZippedRDD
+import org.apache.spark.rdd.ZippedPartitionsRDD2
+import org.apache.spark.rdd.ZippedPartitionsRDD3
+import org.apache.spark.rdd.ZippedPartitionsRDD4
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.BoundedPriorityQueue
import SparkContext._
@@ -62,12 +62,12 @@ import SparkContext._
* A Resilient Distributed Dataset (RDD), the basic abstraction in Spark. Represents an immutable,
* partitioned collection of elements that can be operated on in parallel. This class contains the
* basic operations available on all RDDs, such as `map`, `filter`, and `persist`. In addition,
- * [[spark.PairRDDFunctions]] contains operations available only on RDDs of key-value pairs, such
- * as `groupByKey` and `join`; [[spark.DoubleRDDFunctions]] contains operations available only on
- * RDDs of Doubles; and [[spark.SequenceFileRDDFunctions]] contains operations available on RDDs
+ * [[org.apache.spark.PairRDDFunctions]] contains operations available only on RDDs of key-value pairs, such
+ * as `groupByKey` and `join`; [[org.apache.spark.DoubleRDDFunctions]] contains operations available only on
+ * RDDs of Doubles; and [[org.apache.spark.SequenceFileRDDFunctions]] contains operations available on RDDs
* that can be saved as SequenceFiles. These operations are automatically available on any RDD of
 * the right type (e.g. RDD[(Int, Int)]) through implicit conversions when you
- * `import spark.SparkContext._`.
+ * `import org.apache.spark.SparkContext._`.
*
* Internally, each RDD is characterized by five main properties:
*
@@ -893,7 +893,7 @@ abstract class RDD[T: ClassManifest](
dependencies.head.rdd.asInstanceOf[RDD[U]]
}
- /** The [[spark.SparkContext]] that this RDD was created on. */
+ /** The [[org.apache.spark.SparkContext]] that this RDD was created on. */
def context = sc
// Avoid handling doCheckpoint multiple times to prevent excessive recursion
@@ -929,7 +929,7 @@ abstract class RDD[T: ClassManifest](
* Clears the dependencies of this RDD. This method must ensure that all references
 * to the original parent RDDs are removed to enable the parent RDDs to be garbage
* collected. Subclasses of RDD may override this method for implementing their own cleaning
- * logic. See [[spark.rdd.UnionRDD]] for an example.
+ * logic. See [[org.apache.spark.rdd.UnionRDD]] for an example.
*/
protected def clearDependencies() {
dependencies_ = null
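
A sketch of that cleanup contract for a hypothetical one-parent RDD (WrapperRDD is illustrative only, not part of the codebase):

    class WrapperRDD[T: ClassManifest](var prev: RDD[T]) extends RDD[T](prev) {
      override protected def getPartitions: Array[Partition] = prev.partitions
      override def compute(split: Partition, context: TaskContext): Iterator[T] =
        prev.iterator(split, context)
      override protected def clearDependencies() {
        super.clearDependencies()
        prev = null   // drop the strong reference so the parent can be GC'd
      }
    }
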
diff --git a/core/src/main/scala/spark/RDDCheckpointData.scala b/core/src/main/scala/org/apache/spark/RDDCheckpointData.scala
index b615f820eb..0334de6924 100644
--- a/core/src/main/scala/spark/RDDCheckpointData.scala
+++ b/core/src/main/scala/org/apache/spark/RDDCheckpointData.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.apache.hadoop.fs.Path
import org.apache.hadoop.conf.Configuration
diff --git a/core/src/main/scala/spark/SequenceFileRDDFunctions.scala b/core/src/main/scala/org/apache/spark/SequenceFileRDDFunctions.scala
index 9f30b7f22f..d58fb4e4bc 100644
--- a/core/src/main/scala/spark/SequenceFileRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/SequenceFileRDDFunctions.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io.EOFException
import java.net.URL
@@ -41,7 +41,7 @@ import org.apache.hadoop.io.NullWritable
import org.apache.hadoop.io.BytesWritable
import org.apache.hadoop.io.Text
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
/**
* Extra functions available on RDDs of (key, value) pairs to create a Hadoop SequenceFile,
diff --git a/core/src/main/scala/spark/SerializableWritable.scala b/core/src/main/scala/org/apache/spark/SerializableWritable.scala
index 936d8e6241..fdd4c24e23 100644
--- a/core/src/main/scala/spark/SerializableWritable.scala
+++ b/core/src/main/scala/org/apache/spark/SerializableWritable.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io._
diff --git a/core/src/main/scala/spark/ShuffleFetcher.scala b/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
index a6839cf7a4..307c383a89 100644
--- a/core/src/main/scala/spark/ShuffleFetcher.scala
+++ b/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
-import spark.executor.TaskMetrics
-import spark.serializer.Serializer
+import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.serializer.Serializer
private[spark] abstract class ShuffleFetcher {
diff --git a/core/src/main/scala/spark/SizeEstimator.scala b/core/src/main/scala/org/apache/spark/SizeEstimator.scala
index 6cc57566d7..4bfc837710 100644
--- a/core/src/main/scala/spark/SizeEstimator.scala
+++ b/core/src/main/scala/org/apache/spark/SizeEstimator.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.lang.reflect.Field
import java.lang.reflect.Modifier
diff --git a/core/src/main/scala/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 7ce9505b9c..1207b242bc 100644
--- a/core/src/main/scala/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io._
import java.net.URI
@@ -52,22 +52,22 @@ import org.apache.hadoop.mapreduce.lib.input.{FileInputFormat => NewFileInputFor
import org.apache.mesos.MesosNativeLibrary
-import spark.deploy.LocalSparkCluster
-import spark.partial.{ApproximateEvaluator, PartialResult}
-import spark.rdd.{CheckpointRDD, HadoopRDD, NewHadoopRDD, UnionRDD, ParallelCollectionRDD,
+import org.apache.spark.deploy.LocalSparkCluster
+import org.apache.spark.partial.{ApproximateEvaluator, PartialResult}
+import org.apache.spark.rdd.{CheckpointRDD, HadoopRDD, NewHadoopRDD, UnionRDD, ParallelCollectionRDD,
OrderedRDDFunctions}
-import spark.scheduler._
-import spark.scheduler.cluster.{StandaloneSchedulerBackend, SparkDeploySchedulerBackend,
+import org.apache.spark.scheduler._
+import org.apache.spark.scheduler.cluster.{StandaloneSchedulerBackend, SparkDeploySchedulerBackend,
ClusterScheduler, Schedulable, SchedulingMode}
-import spark.scheduler.local.LocalScheduler
-import spark.scheduler.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
-import spark.storage.{StorageStatus, StorageUtils, RDDInfo, BlockManagerSource}
-import spark.ui.SparkUI
-import spark.util.{MetadataCleaner, TimeStampedHashMap}
+import org.apache.spark.scheduler.local.LocalScheduler
+import org.apache.spark.scheduler.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
+import org.apache.spark.storage.{StorageStatus, StorageUtils, RDDInfo, BlockManagerSource}
+import org.apache.spark.ui.SparkUI
+import org.apache.spark.util.{MetadataCleaner, TimeStampedHashMap}
import scala.Some
-import spark.scheduler.StageInfo
-import spark.storage.RDDInfo
-import spark.storage.StorageStatus
+import org.apache.spark.scheduler.StageInfo
+import org.apache.spark.storage.RDDInfo
+import org.apache.spark.storage.StorageStatus
/**
* Main entry point for Spark functionality. A SparkContext represents the connection to a Spark
@@ -494,14 +494,14 @@ class SparkContext(
// Methods for creating shared variables
/**
- * Create an [[spark.Accumulator]] variable of a given type, which tasks can "add" values
+ * Create an [[org.apache.spark.Accumulator]] variable of a given type, which tasks can "add" values
* to using the `+=` method. Only the driver can access the accumulator's `value`.
*/
def accumulator[T](initialValue: T)(implicit param: AccumulatorParam[T]) =
new Accumulator(initialValue, param)
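
A short usage sketch for the accumulator API above, assuming a SparkContext `sc`:

    import org.apache.spark.SparkContext._   // implicit IntAccumulatorParam

    val errors = sc.accumulator(0)
    sc.parallelize(1 to 100).foreach { i =>
      if (i % 10 == 0) errors += 1           // tasks may only add
    }
    println(errors.value)                    // only the driver may read
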
/**
- * Create an [[spark.Accumulable]] shared variable, to which tasks can add values with `+=`.
+ * Create an [[org.apache.spark.Accumulable]] shared variable, to which tasks can add values with `+=`.
 * Only the driver can access the accumulable's `value`.
* @tparam T accumulator type
* @tparam R type that can be added to the accumulator
@@ -521,7 +521,7 @@ class SparkContext(
}
/**
- * Broadcast a read-only variable to the cluster, returning a [[spark.broadcast.Broadcast]] object for
+ * Broadcast a read-only variable to the cluster, returning a [[org.apache.spark.broadcast.Broadcast]] object for
 * reading it in distributed functions. The variable will be sent to each node only once.
*/
def broadcast[T](value: T) = env.broadcastManager.newBroadcast[T](value, isLocal)
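
And a matching sketch for broadcast variables:

    val lookup = sc.broadcast(Map("a" -> 1, "b" -> 2))
    val total = sc.parallelize(Seq("a", "b", "a"))
      .map(word => lookup.value.getOrElse(word, 0))   // read on the executors
      .reduce(_ + _)
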
diff --git a/core/src/main/scala/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
index 1f66e9cc7f..6e6fe5df6b 100644
--- a/core/src/main/scala/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import collection.mutable
import serializer.Serializer
@@ -23,15 +23,14 @@ import serializer.Serializer
import akka.actor.{Actor, ActorRef, Props, ActorSystemImpl, ActorSystem}
import akka.remote.RemoteActorRefProvider
-import spark.broadcast.BroadcastManager
-import spark.metrics.MetricsSystem
-import spark.deploy.SparkHadoopUtil
-import spark.storage.BlockManager
-import spark.storage.BlockManagerMaster
-import spark.network.ConnectionManager
-import spark.serializer.{Serializer, SerializerManager}
-import spark.util.AkkaUtils
-import spark.api.python.PythonWorkerFactory
+import org.apache.spark.broadcast.BroadcastManager
+import org.apache.spark.metrics.MetricsSystem
+import org.apache.spark.deploy.SparkHadoopUtil
+import org.apache.spark.storage.{BlockManagerMasterActor, BlockManager, BlockManagerMaster}
+import org.apache.spark.network.ConnectionManager
+import org.apache.spark.serializer.{Serializer, SerializerManager}
+import org.apache.spark.util.AkkaUtils
+import org.apache.spark.api.python.PythonWorkerFactory
/**
@@ -156,10 +155,10 @@ object SparkEnv extends Logging {
val serializerManager = new SerializerManager
val serializer = serializerManager.setDefault(
- System.getProperty("spark.serializer", "spark.JavaSerializer"))
+ System.getProperty("spark.serializer", "org.apache.spark.JavaSerializer"))
val closureSerializer = serializerManager.get(
- System.getProperty("spark.closure.serializer", "spark.JavaSerializer"))
+ System.getProperty("spark.closure.serializer", "org.apache.spark.JavaSerializer"))
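
A sketch of how these properties are meant to be set, before the SparkContext (and with it SparkEnv) is created; the Kryo class name assumes the Kryo serializer sits beside JavaSerializer after the rename:

    // SparkEnv reads the properties once at startup, so set them first.
    System.setProperty("spark.serializer", "org.apache.spark.KryoSerializer")
    System.setProperty("spark.closure.serializer", "org.apache.spark.JavaSerializer")
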
def registerOrLookup(name: String, newActor: => Actor): ActorRef = {
if (isDriver) {
@@ -177,7 +176,7 @@ object SparkEnv extends Logging {
val blockManagerMaster = new BlockManagerMaster(registerOrLookup(
"BlockManagerMaster",
- new spark.storage.BlockManagerMasterActor(isLocal)))
+ new BlockManagerMasterActor(isLocal)))
val blockManager = new BlockManager(executorId, actorSystem, blockManagerMaster, serializer)
val connectionManager = blockManager.connectionManager
@@ -194,7 +193,7 @@ object SparkEnv extends Logging {
new MapOutputTrackerActor(mapOutputTracker))
val shuffleFetcher = instantiateClass[ShuffleFetcher](
- "spark.shuffle.fetcher", "spark.BlockStoreShuffleFetcher")
+ "spark.shuffle.fetcher", "org.apache.spark.BlockStoreShuffleFetcher")
val httpFileServer = new HttpFileServer()
httpFileServer.initialize()
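
The `instantiateClass` call above follows the same property-driven reflection pattern as the serializer lookup; roughly, as an illustrative reduction rather than the private helper itself:

    def instantiate[T](prop: String, defaultClass: String): T =
      Class.forName(System.getProperty(prop, defaultClass))
        .newInstance().asInstanceOf[T]
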
diff --git a/core/src/main/scala/spark/SparkException.scala b/core/src/main/scala/org/apache/spark/SparkException.scala
index b7045eea63..d34e47e8ca 100644
--- a/core/src/main/scala/spark/SparkException.scala
+++ b/core/src/main/scala/org/apache/spark/SparkException.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
class SparkException(message: String, cause: Throwable)
extends Exception(message, cause) {
diff --git a/core/src/main/scala/spark/SparkFiles.java b/core/src/main/scala/org/apache/spark/SparkFiles.java
index f9b3f7965e..af9cf85e37 100644
--- a/core/src/main/scala/spark/SparkFiles.java
+++ b/core/src/main/scala/org/apache/spark/SparkFiles.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark;
+package org.apache.spark;
import java.io.File;
diff --git a/core/src/main/scala/spark/SparkHadoopWriter.scala b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
index 6b330ef572..2bab9d6e3d 100644
--- a/core/src/main/scala/spark/SparkHadoopWriter.scala
+++ b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala
@@ -25,8 +25,8 @@ import java.text.NumberFormat
import java.io.IOException
import java.util.Date
-import spark.Logging
-import spark.SerializableWritable
+import org.apache.spark.Logging
+import org.apache.spark.SerializableWritable
/**
* Internal helper class that saves an RDD using a Hadoop OutputFormat. This is only public
diff --git a/core/src/main/scala/spark/TaskContext.scala b/core/src/main/scala/org/apache/spark/TaskContext.scala
index b79f4ca813..b2dd668330 100644
--- a/core/src/main/scala/spark/TaskContext.scala
+++ b/core/src/main/scala/org/apache/spark/TaskContext.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import executor.TaskMetrics
import scala.collection.mutable.ArrayBuffer
diff --git a/core/src/main/scala/spark/TaskEndReason.scala b/core/src/main/scala/org/apache/spark/TaskEndReason.scala
index 3ad665da34..03bf268863 100644
--- a/core/src/main/scala/spark/TaskEndReason.scala
+++ b/core/src/main/scala/org/apache/spark/TaskEndReason.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
-import spark.executor.TaskMetrics
-import spark.storage.BlockManagerId
+import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.storage.BlockManagerId
/**
* Various possible reasons why a task ended. The low-level TaskScheduler is supposed to retry
diff --git a/core/src/main/scala/spark/TaskState.scala b/core/src/main/scala/org/apache/spark/TaskState.scala
index bf75753056..19ce8369d9 100644
--- a/core/src/main/scala/spark/TaskState.scala
+++ b/core/src/main/scala/org/apache/spark/TaskState.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.apache.mesos.Protos.{TaskState => MesosTaskState}
diff --git a/core/src/main/scala/spark/Utils.scala b/core/src/main/scala/org/apache/spark/Utils.scala
index bb8aad3f4c..1e17deb010 100644
--- a/core/src/main/scala/spark/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/Utils.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io._
import java.net.{InetAddress, URL, URI, NetworkInterface, Inet4Address, ServerSocket}
@@ -33,8 +33,8 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder
import org.apache.hadoop.fs.{Path, FileSystem, FileUtil}
-import spark.serializer.{DeserializationStream, SerializationStream, SerializerInstance}
-import spark.deploy.SparkHadoopUtil
+import org.apache.spark.serializer.{DeserializationStream, SerializationStream, SerializerInstance}
+import org.apache.spark.deploy.SparkHadoopUtil
import java.nio.ByteBuffer
diff --git a/core/src/main/scala/spark/api/java/JavaDoubleRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
index 8ce7df6213..cb25ff728e 100644
--- a/core/src/main/scala/spark/api/java/JavaDoubleRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala
@@ -15,16 +15,16 @@
* limitations under the License.
*/
-package spark.api.java
-
-import spark.RDD
-import spark.SparkContext.doubleRDDToDoubleRDDFunctions
-import spark.api.java.function.{Function => JFunction}
-import spark.util.StatCounter
-import spark.partial.{BoundedDouble, PartialResult}
-import spark.storage.StorageLevel
+package org.apache.spark.api.java
+
+import org.apache.spark.RDD
+import org.apache.spark.SparkContext.doubleRDDToDoubleRDDFunctions
+import org.apache.spark.api.java.function.{Function => JFunction}
+import org.apache.spark.util.StatCounter
+import org.apache.spark.partial.{BoundedDouble, PartialResult}
+import org.apache.spark.storage.StorageLevel
import java.lang.Double
-import spark.Partitioner
+import org.apache.spark.Partitioner
class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, JavaDoubleRDD] {
@@ -119,7 +119,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav
def sum(): Double = srdd.sum()
/**
- * Return a [[spark.util.StatCounter]] object that captures the mean, variance and count
+ * Return a [[org.apache.spark.util.StatCounter]] object that captures the mean, variance and count
* of the RDD's elements in one operation.
*/
def stats(): StatCounter = srdd.stats()
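
A small sketch of `stats()` from the Scala side (the Java wrapper above simply delegates to it):

    import org.apache.spark.SparkContext._   // doubleRDDToDoubleRDDFunctions

    val s = sc.parallelize(1 to 100).map(_.toDouble).stats()
    println(s.count + " values, mean " + s.mean + ", stdev " + s.stdev)
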
diff --git a/core/src/main/scala/spark/api/java/JavaPairRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala
index effe6e5e0d..09da35aee6 100644
--- a/core/src/main/scala/spark/api/java/JavaPairRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaPairRDD.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java
+package org.apache.spark.api.java
import java.util.{List => JList}
import java.util.Comparator
@@ -30,17 +30,17 @@ import org.apache.hadoop.mapred.OutputFormat
import org.apache.hadoop.mapreduce.{OutputFormat => NewOutputFormat}
import org.apache.hadoop.conf.Configuration
-import spark.HashPartitioner
-import spark.Partitioner
-import spark.Partitioner._
-import spark.RDD
-import spark.SparkContext.rddToPairRDDFunctions
-import spark.api.java.function.{Function2 => JFunction2}
-import spark.api.java.function.{Function => JFunction}
-import spark.partial.BoundedDouble
-import spark.partial.PartialResult
-import spark.rdd.OrderedRDDFunctions
-import spark.storage.StorageLevel
+import org.apache.spark.HashPartitioner
+import org.apache.spark.Partitioner
+import org.apache.spark.Partitioner._
+import org.apache.spark.RDD
+import org.apache.spark.SparkContext.rddToPairRDDFunctions
+import org.apache.spark.api.java.function.{Function2 => JFunction2}
+import org.apache.spark.api.java.function.{Function => JFunction}
+import org.apache.spark.partial.BoundedDouble
+import org.apache.spark.partial.PartialResult
+import org.apache.spark.rdd.OrderedRDDFunctions
+import org.apache.spark.storage.StorageLevel
class JavaPairRDD[K, V](val rdd: RDD[(K, V)])(implicit val kManifest: ClassManifest[K],
diff --git a/core/src/main/scala/spark/api/java/JavaRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
index c0bf2cf568..68cfcf5999 100644
--- a/core/src/main/scala/spark/api/java/JavaRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.api.java
+package org.apache.spark.api.java
-import spark._
-import spark.api.java.function.{Function => JFunction}
-import spark.storage.StorageLevel
+import org.apache.spark._
+import org.apache.spark.api.java.function.{Function => JFunction}
+import org.apache.spark.storage.StorageLevel
class JavaRDD[T](val rdd: RDD[T])(implicit val classManifest: ClassManifest[T]) extends
JavaRDDLike[T, JavaRDD[T]] {
diff --git a/core/src/main/scala/spark/api/java/JavaRDDLike.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
index 2c2b138f16..1ad8514980 100644
--- a/core/src/main/scala/spark/api/java/JavaRDDLike.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
@@ -15,18 +15,18 @@
* limitations under the License.
*/
-package spark.api.java
+package org.apache.spark.api.java
import java.util.{List => JList, Comparator}
import scala.Tuple2
import scala.collection.JavaConversions._
import org.apache.hadoop.io.compress.CompressionCodec
-import spark.{SparkContext, Partition, RDD, TaskContext}
-import spark.api.java.JavaPairRDD._
-import spark.api.java.function.{Function2 => JFunction2, Function => JFunction, _}
-import spark.partial.{PartialResult, BoundedDouble}
-import spark.storage.StorageLevel
+import org.apache.spark.{SparkContext, Partition, RDD, TaskContext}
+import org.apache.spark.api.java.JavaPairRDD._
+import org.apache.spark.api.java.function.{Function2 => JFunction2, Function => JFunction, _}
+import org.apache.spark.partial.{PartialResult, BoundedDouble}
+import org.apache.spark.storage.StorageLevel
import com.google.common.base.Optional
@@ -40,7 +40,7 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
/** Set of partitions in this RDD. */
def splits: JList[Partition] = new java.util.ArrayList(rdd.partitions.toSeq)
- /** The [[spark.SparkContext]] that this RDD was created on. */
+ /** The [[org.apache.spark.SparkContext]] that this RDD was created on. */
def context: SparkContext = rdd.context
/** A unique ID for this RDD (within its SparkContext). */
diff --git a/core/src/main/scala/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index 29d57004b5..618a7b3bf7 100644
--- a/core/src/main/scala/spark/api/java/JavaSparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java
+package org.apache.spark.api.java
import java.util.{Map => JMap}
@@ -27,15 +27,15 @@ import org.apache.hadoop.mapred.InputFormat
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat}
-import spark.{Accumulable, AccumulableParam, Accumulator, AccumulatorParam, RDD, SparkContext}
-import spark.SparkContext.IntAccumulatorParam
-import spark.SparkContext.DoubleAccumulatorParam
-import spark.broadcast.Broadcast
+import org.apache.spark.{Accumulable, AccumulableParam, Accumulator, AccumulatorParam, RDD, SparkContext}
+import org.apache.spark.SparkContext.IntAccumulatorParam
+import org.apache.spark.SparkContext.DoubleAccumulatorParam
+import org.apache.spark.broadcast.Broadcast
import com.google.common.base.Optional
/**
- * A Java-friendly version of [[spark.SparkContext]] that returns [[spark.api.java.JavaRDD]]s and
+ * A Java-friendly version of [[org.apache.spark.SparkContext]] that returns [[org.apache.spark.api.java.JavaRDD]]s and
* works with Java collections instead of Scala ones.
*/
class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWorkaround {
@@ -283,48 +283,48 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWork
}
/**
- * Create an [[spark.Accumulator]] integer variable, which tasks can "add" values
+ * Create an [[org.apache.spark.Accumulator]] integer variable, which tasks can "add" values
* to using the `add` method. Only the master can access the accumulator's `value`.
*/
def intAccumulator(initialValue: Int): Accumulator[java.lang.Integer] =
sc.accumulator(initialValue)(IntAccumulatorParam).asInstanceOf[Accumulator[java.lang.Integer]]
/**
- * Create an [[spark.Accumulator]] double variable, which tasks can "add" values
+ * Create an [[org.apache.spark.Accumulator]] double variable, which tasks can "add" values
* to using the `add` method. Only the master can access the accumulator's `value`.
*/
def doubleAccumulator(initialValue: Double): Accumulator[java.lang.Double] =
sc.accumulator(initialValue)(DoubleAccumulatorParam).asInstanceOf[Accumulator[java.lang.Double]]
/**
- * Create an [[spark.Accumulator]] integer variable, which tasks can "add" values
+ * Create an [[org.apache.spark.Accumulator]] integer variable, which tasks can "add" values
* to using the `add` method. Only the master can access the accumulator's `value`.
*/
def accumulator(initialValue: Int): Accumulator[java.lang.Integer] = intAccumulator(initialValue)
/**
- * Create an [[spark.Accumulator]] double variable, which tasks can "add" values
+ * Create an [[org.apache.spark.Accumulator]] double variable, which tasks can "add" values
* to using the `add` method. Only the master can access the accumulator's `value`.
*/
def accumulator(initialValue: Double): Accumulator[java.lang.Double] =
doubleAccumulator(initialValue)
/**
- * Create an [[spark.Accumulator]] variable of a given type, which tasks can "add" values
+ * Create an [[org.apache.spark.Accumulator]] variable of a given type, which tasks can "add" values
* to using the `add` method. Only the master can access the accumulator's `value`.
*/
def accumulator[T](initialValue: T, accumulatorParam: AccumulatorParam[T]): Accumulator[T] =
sc.accumulator(initialValue)(accumulatorParam)
/**
- * Create an [[spark.Accumulable]] shared variable of the given type, to which tasks can
+ * Create an [[org.apache.spark.Accumulable]] shared variable of the given type, to which tasks can
* "add" values with `add`. Only the master can access the accumuable's `value`.
*/
def accumulable[T, R](initialValue: T, param: AccumulableParam[T, R]): Accumulable[T, R] =
sc.accumulable(initialValue)(param)
/**
- * Broadcast a read-only variable to the cluster, returning a [[spark.Broadcast]] object for
+ * Broadcast a read-only variable to the cluster, returning a [[org.apache.spark.Broadcast]] object for
 * reading it in distributed functions. The variable will be sent to each node only once.
*/
def broadcast[T](value: T): Broadcast[T] = sc.broadcast(value)
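
Taken together, a hedged sketch of these Java-facing entry points, driven from Scala for brevity:

    val jsc = new JavaSparkContext("local", "SharedVariablesDemo")
    val counter = jsc.intAccumulator(0)        // Accumulator[java.lang.Integer]
    val table = jsc.broadcast(Array("a", "b")) // Broadcast[Array[String]]
    counter.add(1)                             // in a job, tasks would call add
    println(counter.value)                     // readable only on the master
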
diff --git a/core/src/main/scala/spark/api/java/JavaSparkContextVarargsWorkaround.java b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
index 42b1de01b1..c9cbce5624 100644
--- a/core/src/main/scala/spark/api/java/JavaSparkContextVarargsWorkaround.java
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContextVarargsWorkaround.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java;
+package org.apache.spark.api.java;
import java.util.Arrays;
import java.util.ArrayList;
diff --git a/core/src/main/scala/spark/api/java/JavaUtils.scala b/core/src/main/scala/org/apache/spark/api/java/JavaUtils.scala
index ffc131ac83..ecbf18849a 100644
--- a/core/src/main/scala/spark/api/java/JavaUtils.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaUtils.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java
+package org.apache.spark.api.java
import com.google.common.base.Optional
diff --git a/core/src/main/scala/spark/api/java/StorageLevels.java b/core/src/main/scala/org/apache/spark/api/java/StorageLevels.java
index f385636e83..0744269773 100644
--- a/core/src/main/scala/spark/api/java/StorageLevels.java
+++ b/core/src/main/scala/org/apache/spark/api/java/StorageLevels.java
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.api.java;
+package org.apache.spark.api.java;
-import spark.storage.StorageLevel;
+import org.apache.spark.storage.StorageLevel;
/**
* Expose some commonly useful storage level constants.
diff --git a/core/src/main/scala/spark/api/java/function/DoubleFlatMapFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
index 8bc88d757f..4830067f7a 100644
--- a/core/src/main/scala/spark/api/java/function/DoubleFlatMapFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java.function;
+package org.apache.spark.api.java.function;
import scala.runtime.AbstractFunction1;
diff --git a/core/src/main/scala/spark/api/java/function/DoubleFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
index 1aa1e5dae0..db34cd190a 100644
--- a/core/src/main/scala/spark/api/java/function/DoubleFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java.function;
+package org.apache.spark.api.java.function;
import scala.runtime.AbstractFunction1;
diff --git a/core/src/main/scala/spark/api/java/function/FlatMapFunction.scala b/core/src/main/scala/org/apache/spark/api/java/function/FlatMapFunction.scala
index 9eb0cfe3f9..158539a846 100644
--- a/core/src/main/scala/spark/api/java/function/FlatMapFunction.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/function/FlatMapFunction.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java.function
+package org.apache.spark.api.java.function
/**
* A function that returns zero or more output records from each input record.
diff --git a/core/src/main/scala/spark/api/java/function/FlatMapFunction2.scala b/core/src/main/scala/org/apache/spark/api/java/function/FlatMapFunction2.scala
index dda98710c2..5ef6a814f5 100644
--- a/core/src/main/scala/spark/api/java/function/FlatMapFunction2.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/function/FlatMapFunction2.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java.function
+package org.apache.spark.api.java.function
/**
* A function that takes two inputs and returns zero or more output records.
diff --git a/core/src/main/scala/spark/api/java/function/Function.java b/core/src/main/scala/org/apache/spark/api/java/function/Function.java
index 2a2ea0aacf..b9070cfd83 100644
--- a/core/src/main/scala/spark/api/java/function/Function.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java.function;
+package org.apache.spark.api.java.function;
import scala.reflect.ClassManifest;
import scala.reflect.ClassManifest$;
diff --git a/core/src/main/scala/spark/api/java/function/Function2.java b/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
index 952d31ece4..d4c9154869 100644
--- a/core/src/main/scala/spark/api/java/function/Function2.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/Function2.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java.function;
+package org.apache.spark.api.java.function;
import scala.reflect.ClassManifest;
import scala.reflect.ClassManifest$;
diff --git a/core/src/main/scala/spark/api/java/function/PairFlatMapFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
index 4aad602da3..c0e5544b7d 100644
--- a/core/src/main/scala/spark/api/java/function/PairFlatMapFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java.function;
+package org.apache.spark.api.java.function;
import scala.Tuple2;
import scala.reflect.ClassManifest;
diff --git a/core/src/main/scala/spark/api/java/function/PairFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
index ccfe64ecf1..40480fe8e8 100644
--- a/core/src/main/scala/spark/api/java/function/PairFunction.java
+++ b/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java.function;
+package org.apache.spark.api.java.function;
import scala.Tuple2;
import scala.reflect.ClassManifest;
diff --git a/core/src/main/scala/spark/api/java/function/VoidFunction.scala b/core/src/main/scala/org/apache/spark/api/java/function/VoidFunction.scala
index f6fc0b0f7d..ea94313a4a 100644
--- a/core/src/main/scala/spark/api/java/function/VoidFunction.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/function/VoidFunction.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java.function
+package org.apache.spark.api.java.function
/**
* A function with no return value.
diff --git a/core/src/main/scala/spark/api/java/function/WrappedFunction1.scala b/core/src/main/scala/org/apache/spark/api/java/function/WrappedFunction1.scala
index 1758a38c4e..cfe694f65d 100644
--- a/core/src/main/scala/spark/api/java/function/WrappedFunction1.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/function/WrappedFunction1.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java.function
+package org.apache.spark.api.java.function
import scala.runtime.AbstractFunction1
diff --git a/core/src/main/scala/spark/api/java/function/WrappedFunction2.scala b/core/src/main/scala/org/apache/spark/api/java/function/WrappedFunction2.scala
index b093567d2c..eb9277c6fb 100644
--- a/core/src/main/scala/spark/api/java/function/WrappedFunction2.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/function/WrappedFunction2.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.java.function
+package org.apache.spark.api.java.function
import scala.runtime.AbstractFunction2
diff --git a/core/src/main/scala/spark/api/python/PythonPartitioner.scala b/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
index ac112b8c2c..eea63d5a4e 100644
--- a/core/src/main/scala/spark/api/python/PythonPartitioner.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.api.python
+package org.apache.spark.api.python
-import spark.Partitioner
-import spark.Utils
+import org.apache.spark.Partitioner
+import org.apache.spark.Utils
import java.util.Arrays
/**
- * A [[spark.Partitioner]] that performs handling of byte arrays, for use by the Python API.
+ * A [[org.apache.spark.Partitioner]] that performs handling of byte arrays, for use by the Python API.
*
* Stores the unique id() of the Python-side partitioning function so that it is incorporated into
* equality comparisons. Correctness requires that the id is a unique identifier for the
diff --git a/core/src/main/scala/spark/api/python/PythonRDD.scala b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
index 49671437d0..621f0fe8ee 100644
--- a/core/src/main/scala/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.api.python
+package org.apache.spark.api.python
import java.io._
import java.net._
@@ -23,10 +23,10 @@ import java.util.{List => JList, ArrayList => JArrayList, Map => JMap, Collectio
import scala.collection.JavaConversions._
-import spark.api.java.{JavaSparkContext, JavaPairRDD, JavaRDD}
-import spark.broadcast.Broadcast
-import spark._
-import spark.rdd.PipedRDD
+import org.apache.spark.api.java.{JavaSparkContext, JavaPairRDD, JavaRDD}
+import org.apache.spark.broadcast.Broadcast
+import org.apache.spark._
+import org.apache.spark.rdd.PipedRDD
private[spark] class PythonRDD[T: ClassManifest](
@@ -298,7 +298,7 @@ private object Pickle {
val APPENDS: Byte = 'e'
}
-private class BytesToString extends spark.api.java.function.Function[Array[Byte], String] {
+private class BytesToString extends org.apache.spark.api.java.function.Function[Array[Byte], String] {
override def call(arr: Array[Byte]) : String = new String(arr, "UTF-8")
}
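
The same shape works for any Java-API function; a hypothetical example:

    private class StringLength
      extends org.apache.spark.api.java.function.Function[String, java.lang.Integer] {
      override def call(s: String): java.lang.Integer = s.length
    }
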
diff --git a/core/src/main/scala/spark/api/python/PythonWorkerFactory.scala b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
index 14f8320678..08e3f670f5 100644
--- a/core/src/main/scala/spark/api/python/PythonWorkerFactory.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.api.python
+package org.apache.spark.api.python
import java.io.{File, DataInputStream, IOException}
import java.net.{Socket, SocketException, InetAddress}
import scala.collection.JavaConversions._
-import spark._
+import org.apache.spark._
private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String, String])
extends Logging {
diff --git a/core/src/main/scala/spark/broadcast/BitTorrentBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/BitTorrentBroadcast.scala
index 6f7d385379..99e86237fc 100644
--- a/core/src/main/scala/spark/broadcast/BitTorrentBroadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/BitTorrentBroadcast.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.broadcast
+package org.apache.spark.broadcast
import java.io._
import java.net._
@@ -25,8 +25,8 @@ import java.util.concurrent.atomic.AtomicInteger
import scala.collection.mutable.{ListBuffer, Map, Set}
import scala.math
-import spark._
-import spark.storage.StorageLevel
+import org.apache.spark._
+import org.apache.spark.storage.StorageLevel
private[spark] class BitTorrentBroadcast[T](@transient var value_ : T, isLocal: Boolean, id: Long)
extends Broadcast[T](id)
diff --git a/core/src/main/scala/spark/broadcast/Broadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/Broadcast.scala
index aba56a60ca..43c18294c5 100644
--- a/core/src/main/scala/spark/broadcast/Broadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/Broadcast.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.broadcast
+package org.apache.spark.broadcast
import java.io._
import java.util.concurrent.atomic.AtomicLong
-import spark._
+import org.apache.spark._
abstract class Broadcast[T](private[spark] val id: Long) extends Serializable {
def value: T
@@ -28,7 +28,7 @@ abstract class Broadcast[T](private[spark] val id: Long) extends Serializable {
// We cannot have an abstract readObject here due to some weird issues with
// readObject having to be 'private' in sub-classes.
- override def toString = "spark.Broadcast(" + id + ")"
+ override def toString = "Broadcast(" + id + ")"
}
private[spark]
@@ -44,7 +44,7 @@ class BroadcastManager(val _isDriver: Boolean) extends Logging with Serializable
synchronized {
if (!initialized) {
val broadcastFactoryClass = System.getProperty(
- "spark.broadcast.factory", "spark.broadcast.HttpBroadcastFactory")
+ "spark.broadcast.factory", "org.apache.spark.broadcast.HttpBroadcastFactory")
broadcastFactory =
Class.forName(broadcastFactoryClass).newInstance.asInstanceOf[BroadcastFactory]
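
A hedged usage sketch; `TreeBroadcastFactory` assumes the tree implementation ships a factory alongside the HTTP one:

    // Pick a non-default broadcast implementation before its first use.
    System.setProperty("spark.broadcast.factory",
      "org.apache.spark.broadcast.TreeBroadcastFactory")
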
diff --git a/core/src/main/scala/spark/broadcast/BroadcastFactory.scala b/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
index d33d95c7d9..68bff75b90 100644
--- a/core/src/main/scala/spark/broadcast/BroadcastFactory.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.broadcast
+package org.apache.spark.broadcast
/**
* An interface for all the broadcast implementations in Spark (to allow
diff --git a/core/src/main/scala/spark/broadcast/HttpBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
index 138a8c21bc..7a52ff0769 100644
--- a/core/src/main/scala/spark/broadcast/HttpBroadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.broadcast
+package org.apache.spark.broadcast
import java.io.{File, FileOutputStream, ObjectInputStream, OutputStream}
import java.net.URL
@@ -23,10 +23,10 @@ import java.net.URL
import it.unimi.dsi.fastutil.io.FastBufferedInputStream
import it.unimi.dsi.fastutil.io.FastBufferedOutputStream
-import spark.{HttpServer, Logging, SparkEnv, Utils}
-import spark.io.CompressionCodec
-import spark.storage.StorageLevel
-import spark.util.{MetadataCleaner, TimeStampedHashSet}
+import org.apache.spark.{HttpServer, Logging, SparkEnv, Utils}
+import org.apache.spark.io.CompressionCodec
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.{MetadataCleaner, TimeStampedHashSet}
private[spark] class HttpBroadcast[T](@transient var value_ : T, isLocal: Boolean, id: Long)
diff --git a/core/src/main/scala/spark/broadcast/MultiTracker.scala b/core/src/main/scala/org/apache/spark/broadcast/MultiTracker.scala
index 7855d44e9b..10b910df87 100644
--- a/core/src/main/scala/spark/broadcast/MultiTracker.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/MultiTracker.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.broadcast
+package org.apache.spark.broadcast
import java.io._
import java.net._
@@ -23,7 +23,7 @@ import java.util.Random
import scala.collection.mutable.Map
-import spark._
+import org.apache.spark._
private object MultiTracker
extends Logging {
diff --git a/core/src/main/scala/spark/broadcast/SourceInfo.scala b/core/src/main/scala/org/apache/spark/broadcast/SourceInfo.scala
index b17ae63b5c..baa1fd6da4 100644
--- a/core/src/main/scala/spark/broadcast/SourceInfo.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/SourceInfo.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.broadcast
+package org.apache.spark.broadcast
import java.util.BitSet
-import spark._
+import org.apache.spark._
/**
 * Used to keep and pass around information about the peers involved in a broadcast
diff --git a/core/src/main/scala/spark/broadcast/TreeBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/TreeBroadcast.scala
index ea1e9a12c1..b5a4ccc0ee 100644
--- a/core/src/main/scala/spark/broadcast/TreeBroadcast.scala
+++ b/core/src/main/scala/org/apache/spark/broadcast/TreeBroadcast.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.broadcast
+package org.apache.spark.broadcast
import java.io._
import java.net._
@@ -24,8 +24,8 @@ import java.util.{Comparator, Random, UUID}
import scala.collection.mutable.{ListBuffer, Map, Set}
import scala.math
-import spark._
-import spark.storage.StorageLevel
+import org.apache.spark._
+import org.apache.spark.storage.StorageLevel
private[spark] class TreeBroadcast[T](@transient var value_ : T, isLocal: Boolean, id: Long)
extends Broadcast[T](id) with Logging with Serializable {
diff --git a/core/src/main/scala/spark/deploy/ApplicationDescription.scala b/core/src/main/scala/org/apache/spark/deploy/ApplicationDescription.scala
index a8b22fbef8..19d393a0db 100644
--- a/core/src/main/scala/spark/deploy/ApplicationDescription.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/ApplicationDescription.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy
+package org.apache.spark.deploy
private[spark] class ApplicationDescription(
val name: String,
diff --git a/core/src/main/scala/spark/deploy/Command.scala b/core/src/main/scala/org/apache/spark/deploy/Command.scala
index bad629e965..fa8af9a646 100644
--- a/core/src/main/scala/spark/deploy/Command.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/Command.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy
+package org.apache.spark.deploy
import scala.collection.Map
diff --git a/core/src/main/scala/spark/deploy/DeployMessage.scala b/core/src/main/scala/org/apache/spark/deploy/DeployMessage.scala
index 0db13ffc98..4dc6ada2d1 100644
--- a/core/src/main/scala/spark/deploy/DeployMessage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/DeployMessage.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.deploy
+package org.apache.spark.deploy
import scala.collection.immutable.List
-import spark.Utils
-import spark.deploy.ExecutorState.ExecutorState
-import spark.deploy.master.{WorkerInfo, ApplicationInfo}
-import spark.deploy.worker.ExecutorRunner
+import org.apache.spark.Utils
+import org.apache.spark.deploy.ExecutorState.ExecutorState
+import org.apache.spark.deploy.master.{WorkerInfo, ApplicationInfo}
+import org.apache.spark.deploy.worker.ExecutorRunner
private[deploy] sealed trait DeployMessage extends Serializable
diff --git a/core/src/main/scala/spark/deploy/ExecutorState.scala b/core/src/main/scala/org/apache/spark/deploy/ExecutorState.scala
index 08c9a3b725..fcfea96ad6 100644
--- a/core/src/main/scala/spark/deploy/ExecutorState.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/ExecutorState.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy
+package org.apache.spark.deploy
private[spark] object ExecutorState
extends Enumeration("LAUNCHING", "LOADING", "RUNNING", "KILLED", "FAILED", "LOST") {
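
For reference, the Scala 2.9-era Enumeration idiom used here, in miniature (`Status` is illustrative):

    object Status extends Enumeration("IDLE", "BUSY") {
      type Status = Value
      val IDLE, BUSY = Value   // values pair up with the constructor strings
    }
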
diff --git a/core/src/main/scala/spark/deploy/JsonProtocol.scala b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
index f8dcf025b4..a6be8efef1 100644
--- a/core/src/main/scala/spark/deploy/JsonProtocol.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.deploy
+package org.apache.spark.deploy
import net.liftweb.json.JsonDSL._
-import spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
-import spark.deploy.master.{ApplicationInfo, WorkerInfo}
-import spark.deploy.worker.ExecutorRunner
+import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
+import org.apache.spark.deploy.master.{ApplicationInfo, WorkerInfo}
+import org.apache.spark.deploy.worker.ExecutorRunner
private[spark] object JsonProtocol {
diff --git a/core/src/main/scala/spark/deploy/LocalSparkCluster.scala b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
index 6b8e9f27af..af5a4110b0 100644
--- a/core/src/main/scala/spark/deploy/LocalSparkCluster.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.deploy
+package org.apache.spark.deploy
import akka.actor.{ActorRef, Props, Actor, ActorSystem, Terminated}
-import spark.deploy.worker.Worker
-import spark.deploy.master.Master
-import spark.util.AkkaUtils
-import spark.{Logging, Utils}
+import org.apache.spark.deploy.worker.Worker
+import org.apache.spark.deploy.master.Master
+import org.apache.spark.util.AkkaUtils
+import org.apache.spark.{Logging, Utils}
import scala.collection.mutable.ArrayBuffer
diff --git a/core/src/main/scala/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 882161e669..0a5f4c368f 100644
--- a/core/src/main/scala/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy
+package org.apache.spark.deploy
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.mapred.JobConf
diff --git a/core/src/main/scala/spark/deploy/WebUI.scala b/core/src/main/scala/org/apache/spark/deploy/WebUI.scala
index 8ea7792ef4..ae258b58b9 100644
--- a/core/src/main/scala/spark/deploy/WebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/WebUI.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy
+package org.apache.spark.deploy
import java.text.SimpleDateFormat
import java.util.Date
diff --git a/core/src/main/scala/spark/deploy/client/Client.scala b/core/src/main/scala/org/apache/spark/deploy/client/Client.scala
index 9d5ba8a796..a342dd724a 100644
--- a/core/src/main/scala/spark/deploy/client/Client.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/Client.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.client
+package org.apache.spark.deploy.client
import java.util.concurrent.TimeoutException
@@ -28,10 +28,10 @@ import akka.remote.RemoteClientLifeCycleEvent
import akka.remote.RemoteClientShutdown
import akka.dispatch.Await
-import spark.Logging
-import spark.deploy.{ApplicationDescription, ExecutorState}
-import spark.deploy.DeployMessages._
-import spark.deploy.master.Master
+import org.apache.spark.Logging
+import org.apache.spark.deploy.{ApplicationDescription, ExecutorState}
+import org.apache.spark.deploy.DeployMessages._
+import org.apache.spark.deploy.master.Master
/**
diff --git a/core/src/main/scala/spark/deploy/client/ClientListener.scala b/core/src/main/scala/org/apache/spark/deploy/client/ClientListener.scala
index 064024455e..4605368c11 100644
--- a/core/src/main/scala/spark/deploy/client/ClientListener.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/ClientListener.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.client
+package org.apache.spark.deploy.client
/**
* Callbacks invoked by deploy client when various events happen. There are currently four events:
diff --git a/core/src/main/scala/spark/deploy/client/TestClient.scala b/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
index 4f4daa141a..0322029fbd 100644
--- a/core/src/main/scala/spark/deploy/client/TestClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.deploy.client
+package org.apache.spark.deploy.client
-import spark.util.AkkaUtils
-import spark.{Logging, Utils}
-import spark.deploy.{Command, ApplicationDescription}
+import org.apache.spark.util.AkkaUtils
+import org.apache.spark.{Logging, Utils}
+import org.apache.spark.deploy.{Command, ApplicationDescription}
private[spark] object TestClient {
diff --git a/core/src/main/scala/spark/deploy/client/TestExecutor.scala b/core/src/main/scala/org/apache/spark/deploy/client/TestExecutor.scala
index 8a22b6b89f..c5ac45c673 100644
--- a/core/src/main/scala/spark/deploy/client/TestExecutor.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/TestExecutor.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.client
+package org.apache.spark.deploy.client
private[spark] object TestExecutor {
def main(args: Array[String]) {
diff --git a/core/src/main/scala/spark/deploy/master/ApplicationInfo.scala b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
index 6dd2f06126..bd5327627a 100644
--- a/core/src/main/scala/spark/deploy/master/ApplicationInfo.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.deploy.master
+package org.apache.spark.deploy.master
-import spark.deploy.ApplicationDescription
+import org.apache.spark.deploy.ApplicationDescription
import java.util.Date
import akka.actor.ActorRef
import scala.collection.mutable
diff --git a/core/src/main/scala/spark/deploy/master/ApplicationSource.scala b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationSource.scala
index 4df2b6bfdd..2d75ad5a2c 100644
--- a/core/src/main/scala/spark/deploy/master/ApplicationSource.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationSource.scala
@@ -1,8 +1,8 @@
-package spark.deploy.master
+package org.apache.spark.deploy.master
import com.codahale.metrics.{Gauge, MetricRegistry}
-import spark.metrics.source.Source
+import org.apache.spark.metrics.source.Source
class ApplicationSource(val application: ApplicationInfo) extends Source {
val metricRegistry = new MetricRegistry()
diff --git a/core/src/main/scala/spark/deploy/master/ApplicationState.scala b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationState.scala
index 94f0ad8bae..7e804223cf 100644
--- a/core/src/main/scala/spark/deploy/master/ApplicationState.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationState.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.master
+package org.apache.spark.deploy.master
private[spark] object ApplicationState
extends Enumeration("WAITING", "RUNNING", "FINISHED", "FAILED") {
diff --git a/core/src/main/scala/spark/deploy/master/ExecutorInfo.scala b/core/src/main/scala/org/apache/spark/deploy/master/ExecutorInfo.scala
index 99b60f7d09..cf384a985e 100644
--- a/core/src/main/scala/spark/deploy/master/ExecutorInfo.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ExecutorInfo.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.deploy.master
+package org.apache.spark.deploy.master
-import spark.deploy.ExecutorState
+import org.apache.spark.deploy.ExecutorState
private[spark] class ExecutorInfo(
val id: Int,
diff --git a/core/src/main/scala/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
index 04af5e149c..869b2b2646 100644
--- a/core/src/main/scala/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.master
+package org.apache.spark.deploy.master
import java.text.SimpleDateFormat
import java.util.Date
@@ -27,12 +27,12 @@ import akka.actor.Terminated
import akka.remote.{RemoteClientLifeCycleEvent, RemoteClientDisconnected, RemoteClientShutdown}
import akka.util.duration._
-import spark.{Logging, SparkException, Utils}
-import spark.deploy.{ApplicationDescription, ExecutorState}
-import spark.deploy.DeployMessages._
-import spark.deploy.master.ui.MasterWebUI
-import spark.metrics.MetricsSystem
-import spark.util.AkkaUtils
+import org.apache.spark.{Logging, SparkException, Utils}
+import org.apache.spark.deploy.{ApplicationDescription, ExecutorState}
+import org.apache.spark.deploy.DeployMessages._
+import org.apache.spark.deploy.master.ui.MasterWebUI
+import org.apache.spark.metrics.MetricsSystem
+import org.apache.spark.util.AkkaUtils
private[spark] class Master(host: String, port: Int, webUiPort: Int) extends Actor with Logging {
diff --git a/core/src/main/scala/spark/deploy/master/MasterArguments.scala b/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
index 0ae0160767..c86cca278d 100644
--- a/core/src/main/scala/spark/deploy/master/MasterArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.deploy.master
+package org.apache.spark.deploy.master
-import spark.util.IntParam
-import spark.Utils
+import org.apache.spark.util.IntParam
+import org.apache.spark.Utils
/**
* Command-line parser for the master.
diff --git a/core/src/main/scala/spark/deploy/master/MasterSource.scala b/core/src/main/scala/org/apache/spark/deploy/master/MasterSource.scala
index b8cfa6a773..8dd0a42f71 100644
--- a/core/src/main/scala/spark/deploy/master/MasterSource.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/MasterSource.scala
@@ -1,8 +1,8 @@
-package spark.deploy.master
+package org.apache.spark.deploy.master
import com.codahale.metrics.{Gauge, MetricRegistry}
-import spark.metrics.source.Source
+import org.apache.spark.metrics.source.Source
private[spark] class MasterSource(val master: Master) extends Source {
val metricRegistry = new MetricRegistry()
diff --git a/core/src/main/scala/spark/deploy/master/WorkerInfo.scala b/core/src/main/scala/org/apache/spark/deploy/master/WorkerInfo.scala
index 4135cfeb28..285e07a823 100644
--- a/core/src/main/scala/spark/deploy/master/WorkerInfo.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/WorkerInfo.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.deploy.master
+package org.apache.spark.deploy.master
import akka.actor.ActorRef
import scala.collection.mutable
-import spark.Utils
+import org.apache.spark.Utils
private[spark] class WorkerInfo(
val id: String,
diff --git a/core/src/main/scala/spark/deploy/master/WorkerState.scala b/core/src/main/scala/org/apache/spark/deploy/master/WorkerState.scala
index 3e50b7748d..b5ee6dca79 100644
--- a/core/src/main/scala/spark/deploy/master/WorkerState.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/WorkerState.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.master
+package org.apache.spark.deploy.master
private[spark] object WorkerState extends Enumeration("ALIVE", "DEAD", "DECOMMISSIONED") {
type WorkerState = Value
diff --git a/core/src/main/scala/spark/deploy/master/ui/ApplicationPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
index 2ad98f759c..6435c7f917 100644
--- a/core/src/main/scala/spark/deploy/master/ui/ApplicationPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.master.ui
+package org.apache.spark.deploy.master.ui
import scala.xml.Node
@@ -27,11 +27,11 @@ import javax.servlet.http.HttpServletRequest
import net.liftweb.json.JsonAST.JValue
-import spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
-import spark.deploy.JsonProtocol
-import spark.deploy.master.ExecutorInfo
-import spark.ui.UIUtils
-import spark.Utils
+import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
+import org.apache.spark.deploy.JsonProtocol
+import org.apache.spark.deploy.master.ExecutorInfo
+import org.apache.spark.ui.UIUtils
+import org.apache.spark.Utils
private[spark] class ApplicationPage(parent: MasterWebUI) {
val master = parent.masterActorRef
diff --git a/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
index 093e523e23..58d3863009 100644
--- a/core/src/main/scala/spark/deploy/master/ui/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.master.ui
+package org.apache.spark.deploy.master.ui
import javax.servlet.http.HttpServletRequest
@@ -27,12 +27,12 @@ import akka.util.duration._
import net.liftweb.json.JsonAST.JValue
-import spark.Utils
-import spark.deploy.DeployWebUI
-import spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
-import spark.deploy.JsonProtocol
-import spark.deploy.master.{ApplicationInfo, WorkerInfo}
-import spark.ui.UIUtils
+import org.apache.spark.Utils
+import org.apache.spark.deploy.DeployWebUI
+import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
+import org.apache.spark.deploy.JsonProtocol
+import org.apache.spark.deploy.master.{ApplicationInfo, WorkerInfo}
+import org.apache.spark.ui.UIUtils
private[spark] class IndexPage(parent: MasterWebUI) {
val master = parent.masterActorRef
diff --git a/core/src/main/scala/spark/deploy/master/ui/MasterWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
index c91e1db9f2..47b1e521f5 100644
--- a/core/src/main/scala/spark/deploy/master/ui/MasterWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.master.ui
+package org.apache.spark.deploy.master.ui
import akka.util.Duration
@@ -23,10 +23,10 @@ import javax.servlet.http.HttpServletRequest
import org.eclipse.jetty.server.{Handler, Server}
-import spark.{Logging, Utils}
-import spark.deploy.master.Master
-import spark.ui.JettyUtils
-import spark.ui.JettyUtils._
+import org.apache.spark.{Logging, Utils}
+import org.apache.spark.deploy.master.Master
+import org.apache.spark.ui.JettyUtils
+import org.apache.spark.ui.JettyUtils._
/**
* Web UI server for the standalone master.
@@ -76,5 +76,5 @@ class MasterWebUI(val master: Master, requestedPort: Int) extends Logging {
}
private[spark] object MasterWebUI {
- val STATIC_RESOURCE_DIR = "spark/ui/static"
+ val STATIC_RESOURCE_DIR = "org/apache/spark/ui/static"
}
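The STATIC_RESOURCE_DIR constants change because classpath resources are looked up by package path, so the static UI files under core/src/main/resources move along with the package. A minimal sketch of the lookup (the stylesheet name is illustrative):

    // After the rename, UI static files resolve under the new package path.
    val cssUrl = getClass.getClassLoader
      .getResource("org/apache/spark/ui/static/webui.css")
    require(cssUrl != null, "static resources must move with the package")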
diff --git a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
index 34665ce451..01ce4a6dea 100644
--- a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.worker
+package org.apache.spark.deploy.worker
import java.io._
import java.lang.System.getenv
@@ -25,9 +25,9 @@ import akka.actor.ActorRef
import com.google.common.base.Charsets
import com.google.common.io.Files
-import spark.{Utils, Logging}
-import spark.deploy.{ExecutorState, ApplicationDescription}
-import spark.deploy.DeployMessages.ExecutorStateChanged
+import org.apache.spark.{Utils, Logging}
+import org.apache.spark.deploy.{ExecutorState, ApplicationDescription}
+import org.apache.spark.deploy.DeployMessages.ExecutorStateChanged
/**
* Manages the execution of one executor process.
diff --git a/core/src/main/scala/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index 053ac55226..86e8e7543b 100644
--- a/core/src/main/scala/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.worker
+package org.apache.spark.deploy.worker
import java.text.SimpleDateFormat
import java.util.Date
@@ -27,13 +27,13 @@ import akka.actor.{ActorRef, Props, Actor, ActorSystem, Terminated}
import akka.remote.{RemoteClientLifeCycleEvent, RemoteClientShutdown, RemoteClientDisconnected}
import akka.util.duration._
-import spark.{Logging, Utils}
-import spark.deploy.ExecutorState
-import spark.deploy.DeployMessages._
-import spark.deploy.master.Master
-import spark.deploy.worker.ui.WorkerWebUI
-import spark.metrics.MetricsSystem
-import spark.util.AkkaUtils
+import org.apache.spark.{Logging, Utils}
+import org.apache.spark.deploy.ExecutorState
+import org.apache.spark.deploy.DeployMessages._
+import org.apache.spark.deploy.master.Master
+import org.apache.spark.deploy.worker.ui.WorkerWebUI
+import org.apache.spark.metrics.MetricsSystem
+import org.apache.spark.util.AkkaUtils
private[spark] class Worker(
diff --git a/core/src/main/scala/spark/deploy/worker/WorkerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
index 9fcd3260ca..6d91223413 100644
--- a/core/src/main/scala/spark/deploy/worker/WorkerArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.deploy.worker
+package org.apache.spark.deploy.worker
-import spark.util.IntParam
-import spark.util.MemoryParam
-import spark.Utils
+import org.apache.spark.util.IntParam
+import org.apache.spark.util.MemoryParam
+import org.apache.spark.Utils
import java.lang.management.ManagementFactory
/**
diff --git a/core/src/main/scala/spark/deploy/worker/WorkerSource.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerSource.scala
index 39cb8e5690..6427c0178f 100644
--- a/core/src/main/scala/spark/deploy/worker/WorkerSource.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerSource.scala
@@ -1,8 +1,8 @@
-package spark.deploy.worker
+package org.apache.spark.deploy.worker
import com.codahale.metrics.{Gauge, MetricRegistry}
-import spark.metrics.source.Source
+import org.apache.spark.metrics.source.Source
private[spark] class WorkerSource(val worker: Worker) extends Source {
val sourceName = "worker"
diff --git a/core/src/main/scala/spark/deploy/worker/ui/IndexPage.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/IndexPage.scala
index 243e0765cb..6192c2324b 100644
--- a/core/src/main/scala/spark/deploy/worker/ui/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/IndexPage.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.worker.ui
+package org.apache.spark.deploy.worker.ui
import javax.servlet.http.HttpServletRequest
@@ -27,11 +27,11 @@ import akka.util.duration._
import net.liftweb.json.JsonAST.JValue
-import spark.Utils
-import spark.deploy.JsonProtocol
-import spark.deploy.DeployMessages.{RequestWorkerState, WorkerStateResponse}
-import spark.deploy.worker.ExecutorRunner
-import spark.ui.UIUtils
+import org.apache.spark.Utils
+import org.apache.spark.deploy.JsonProtocol
+import org.apache.spark.deploy.DeployMessages.{RequestWorkerState, WorkerStateResponse}
+import org.apache.spark.deploy.worker.ExecutorRunner
+import org.apache.spark.ui.UIUtils
private[spark] class IndexPage(parent: WorkerWebUI) {
diff --git a/core/src/main/scala/spark/deploy/worker/ui/WorkerWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
index 0a75ad8cf4..bb8165ac09 100644
--- a/core/src/main/scala/spark/deploy/worker/ui/WorkerWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.worker.ui
+package org.apache.spark.deploy.worker.ui
import akka.util.{Duration, Timeout}
@@ -25,11 +25,11 @@ import javax.servlet.http.HttpServletRequest
import org.eclipse.jetty.server.{Handler, Server}
-import spark.deploy.worker.Worker
-import spark.{Utils, Logging}
-import spark.ui.JettyUtils
-import spark.ui.JettyUtils._
-import spark.ui.UIUtils
+import org.apache.spark.deploy.worker.Worker
+import org.apache.spark.{Utils, Logging}
+import org.apache.spark.ui.JettyUtils
+import org.apache.spark.ui.JettyUtils._
+import org.apache.spark.ui.UIUtils
/**
* Web UI server for the standalone worker.
@@ -185,6 +185,6 @@ class WorkerWebUI(val worker: Worker, val workDir: File, requestedPort: Option[I
}
private[spark] object WorkerWebUI {
- val STATIC_RESOURCE_DIR = "spark/ui/static"
+ val STATIC_RESOURCE_DIR = "org/apache/spark/ui/static"
val DEFAULT_PORT="8081"
}
diff --git a/core/src/main/scala/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
index fa82d2b324..5446a3fca9 100644
--- a/core/src/main/scala/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.executor
+package org.apache.spark.executor
import java.io.{File}
import java.lang.management.ManagementFactory
@@ -25,8 +25,8 @@ import java.util.concurrent._
import scala.collection.JavaConversions._
import scala.collection.mutable.HashMap
-import spark.scheduler._
-import spark._
+import org.apache.spark.scheduler._
+import org.apache.spark._
/**
@@ -225,13 +225,13 @@ private[spark] class Executor(
if (classUri != null) {
logInfo("Using REPL class URI: " + classUri)
try {
- val klass = Class.forName("spark.repl.ExecutorClassLoader")
+ val klass = Class.forName("org.apache.spark.repl.ExecutorClassLoader")
.asInstanceOf[Class[_ <: ClassLoader]]
val constructor = klass.getConstructor(classOf[String], classOf[ClassLoader])
return constructor.newInstance(classUri, parent)
} catch {
case _: ClassNotFoundException =>
- logError("Could not find spark.repl.ExecutorClassLoader on classpath!")
+ logError("Could not find org.apache.spark.repl.ExecutorClassLoader on classpath!")
System.exit(1)
null
}
diff --git a/core/src/main/scala/spark/executor/ExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
index 33a6f8a824..ad7dd34c76 100644
--- a/core/src/main/scala/spark/executor/ExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.executor
+package org.apache.spark.executor
import java.nio.ByteBuffer
-import spark.TaskState.TaskState
+import org.apache.spark.TaskState.TaskState
/**
* A pluggable interface used by the Executor to send updates to the cluster scheduler.
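For context, the interface itself is tiny; a sketch of such a backend, with the statusUpdate signature assumed from this era of the codebase:

    import java.nio.ByteBuffer
    import org.apache.spark.TaskState.TaskState

    private[spark] trait ExecutorBackend {
      // Executors call this to report task state transitions and results.
      def statusUpdate(taskId: Long, state: TaskState, data: ByteBuffer)
    }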
diff --git a/core/src/main/scala/spark/executor/ExecutorExitCode.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorExitCode.scala
index 64b9fb88f8..e5c9bbbe28 100644
--- a/core/src/main/scala/spark/executor/ExecutorExitCode.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ExecutorExitCode.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.executor
+package org.apache.spark.executor
/**
* These are exit codes that executors should use to provide the master with information about
diff --git a/core/src/main/scala/spark/executor/ExecutorSource.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
index d491a3c0c9..17653cd560 100644
--- a/core/src/main/scala/spark/executor/ExecutorSource.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala
@@ -1,4 +1,4 @@
-package spark.executor
+package org.apache.spark.executor
import com.codahale.metrics.{Gauge, MetricRegistry}
@@ -8,7 +8,7 @@ import org.apache.hadoop.fs.LocalFileSystem
import scala.collection.JavaConversions._
-import spark.metrics.source.Source
+import org.apache.spark.metrics.source.Source
class ExecutorSource(val executor: Executor) extends Source {
private def fileStats(scheme: String) : Option[FileSystem.Statistics] =
diff --git a/core/src/main/scala/spark/executor/ExecutorURLClassLoader.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorURLClassLoader.scala
index 09d12fb65b..f9bfe8ed2f 100644
--- a/core/src/main/scala/spark/executor/ExecutorURLClassLoader.scala
+++ b/core/src/main/scala/org/apache/spark/executor/ExecutorURLClassLoader.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.executor
+package org.apache.spark.executor
import java.net.{URLClassLoader, URL}
diff --git a/core/src/main/scala/spark/executor/MesosExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
index 4961c42fad..410a94df6b 100644
--- a/core/src/main/scala/spark/executor/MesosExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.executor
+package org.apache.spark.executor
import java.nio.ByteBuffer
import org.apache.mesos.{Executor => MesosExecutor, MesosExecutorDriver, MesosNativeLibrary, ExecutorDriver}
import org.apache.mesos.Protos.{TaskState => MesosTaskState, TaskStatus => MesosTaskStatus, _}
-import spark.TaskState.TaskState
+import org.apache.spark.TaskState.TaskState
import com.google.protobuf.ByteString
-import spark.{Utils, Logging}
-import spark.TaskState
+import org.apache.spark.{Utils, Logging}
+import org.apache.spark.TaskState
private[spark] class MesosExecutorBackend
extends MesosExecutor
diff --git a/core/src/main/scala/spark/executor/StandaloneExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/StandaloneExecutorBackend.scala
index b5fb6dbe29..65801f75b7 100644
--- a/core/src/main/scala/spark/executor/StandaloneExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/StandaloneExecutorBackend.scala
@@ -15,17 +15,17 @@
* limitations under the License.
*/
-package spark.executor
+package org.apache.spark.executor
import java.nio.ByteBuffer
import akka.actor.{ActorRef, Actor, Props, Terminated}
import akka.remote.{RemoteClientLifeCycleEvent, RemoteClientShutdown, RemoteClientDisconnected}
-import spark.{Logging, Utils, SparkEnv}
-import spark.TaskState.TaskState
-import spark.scheduler.cluster.StandaloneClusterMessages._
-import spark.util.AkkaUtils
+import org.apache.spark.{Logging, Utils, SparkEnv}
+import org.apache.spark.TaskState.TaskState
+import org.apache.spark.scheduler.cluster.StandaloneClusterMessages._
+import org.apache.spark.util.AkkaUtils
private[spark] class StandaloneExecutorBackend(
diff --git a/core/src/main/scala/spark/executor/TaskMetrics.scala b/core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala
index 47b8890bee..f311141148 100644
--- a/core/src/main/scala/spark/executor/TaskMetrics.scala
+++ b/core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.executor
+package org.apache.spark.executor
class TaskMetrics extends Serializable {
/**
diff --git a/core/src/main/scala/spark/io/CompressionCodec.scala b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
index 0adebecadb..90a0420caf 100644
--- a/core/src/main/scala/spark/io/CompressionCodec.scala
+++ b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.io
+package org.apache.spark.io
import java.io.{InputStream, OutputStream}
@@ -55,7 +55,7 @@ private[spark] object CompressionCodec {
/**
- * LZF implementation of [[spark.io.CompressionCodec]].
+ * LZF implementation of [[org.apache.spark.io.CompressionCodec]].
*/
class LZFCompressionCodec extends CompressionCodec {
@@ -68,7 +68,7 @@ class LZFCompressionCodec extends CompressionCodec {
/**
- * Snappy implementation of [[spark.io.CompressionCodec]].
+ * Snappy implementation of [[org.apache.spark.io.CompressionCodec]].
* Block size can be configured by spark.io.compression.snappy.block.size.
*/
class SnappyCompressionCodec extends CompressionCodec {
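A hypothetical usage sketch, assuming the trait exposes compressedOutputStream as in released Spark versions; the block-size property is the one named in the comment above:

    // Configure Snappy's block size, then wrap a stream with the codec.
    System.setProperty("spark.io.compression.snappy.block.size", "32768")
    val codec = new SnappyCompressionCodec()
    val out = codec.compressedOutputStream(new java.io.ByteArrayOutputStream())
    out.write("compress me".getBytes("UTF-8"))
    out.close()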
diff --git a/core/src/main/scala/spark/metrics/MetricsConfig.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
index d7fb5378a4..0f9c4e00b1 100644
--- a/core/src/main/scala/spark/metrics/MetricsConfig.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.metrics
+package org.apache.spark.metrics
import java.util.Properties
import java.io.{File, FileInputStream, InputStream, IOException}
@@ -23,7 +23,7 @@ import java.io.{File, FileInputStream, InputStream, IOException}
import scala.collection.mutable
import scala.util.matching.Regex
-import spark.Logging
+import org.apache.spark.Logging
private[spark] class MetricsConfig(val configFile: Option[String]) extends Logging {
initLogging()
@@ -36,7 +36,7 @@ private[spark] class MetricsConfig(val configFile: Option[String]) extends Loggi
var propertyCategories: mutable.HashMap[String, Properties] = null
private def setDefaultProperties(prop: Properties) {
- prop.setProperty("*.sink.servlet.class", "spark.metrics.sink.MetricsServlet")
+ prop.setProperty("*.sink.servlet.class", "org.apache.spark.metrics.sink.MetricsServlet")
prop.setProperty("*.sink.servlet.uri", "/metrics/json")
prop.setProperty("*.sink.servlet.sample", "false")
prop.setProperty("master.sink.servlet.uri", "/metrics/master/json")
diff --git a/core/src/main/scala/spark/metrics/MetricsSystem.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
index 4e6c6b26c8..bec0c83be8 100644
--- a/core/src/main/scala/spark/metrics/MetricsSystem.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.metrics
+package org.apache.spark.metrics
import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry}
@@ -24,9 +24,9 @@ import java.util.concurrent.TimeUnit
import scala.collection.mutable
-import spark.Logging
-import spark.metrics.sink.{MetricsServlet, Sink}
-import spark.metrics.source.Source
+import org.apache.spark.Logging
+import org.apache.spark.metrics.sink.{MetricsServlet, Sink}
+import org.apache.spark.metrics.source.Source
/**
 * Spark Metrics System, created per "instance" and composed of sources,
diff --git a/core/src/main/scala/spark/metrics/sink/ConsoleSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
index 966ba37c20..bce257d6e6 100644
--- a/core/src/main/scala/spark/metrics/sink/ConsoleSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.metrics.sink
+package org.apache.spark.metrics.sink
import com.codahale.metrics.{ConsoleReporter, MetricRegistry}
import java.util.Properties
import java.util.concurrent.TimeUnit
-import spark.metrics.MetricsSystem
+import org.apache.spark.metrics.MetricsSystem
class ConsoleSink(val property: Properties, val registry: MetricRegistry) extends Sink {
val CONSOLE_DEFAULT_PERIOD = 10
diff --git a/core/src/main/scala/spark/metrics/sink/CsvSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
index cb990afdef..3d1a06a395 100644
--- a/core/src/main/scala/spark/metrics/sink/CsvSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.metrics.sink
+package org.apache.spark.metrics.sink
import com.codahale.metrics.{CsvReporter, MetricRegistry}
@@ -23,7 +23,7 @@ import java.io.File
import java.util.{Locale, Properties}
import java.util.concurrent.TimeUnit
-import spark.metrics.MetricsSystem
+import org.apache.spark.metrics.MetricsSystem
class CsvSink(val property: Properties, val registry: MetricRegistry) extends Sink {
val CSV_KEY_PERIOD = "period"
diff --git a/core/src/main/scala/spark/metrics/sink/JmxSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
index ee04544c0e..621d086d41 100644
--- a/core/src/main/scala/spark/metrics/sink/JmxSink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.metrics.sink
+package org.apache.spark.metrics.sink
import com.codahale.metrics.{JmxReporter, MetricRegistry}
diff --git a/core/src/main/scala/spark/metrics/sink/MetricsServlet.scala b/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
index 17432b1ed1..4e90dd4323 100644
--- a/core/src/main/scala/spark/metrics/sink/MetricsServlet.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.metrics.sink
+package org.apache.spark.metrics.sink
import com.codahale.metrics.MetricRegistry
import com.codahale.metrics.json.MetricsModule
@@ -28,7 +28,7 @@ import javax.servlet.http.HttpServletRequest
import org.eclipse.jetty.server.Handler
-import spark.ui.JettyUtils
+import org.apache.spark.ui.JettyUtils
class MetricsServlet(val property: Properties, val registry: MetricRegistry) extends Sink {
val SERVLET_KEY_URI = "uri"
diff --git a/core/src/main/scala/spark/metrics/sink/Sink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/Sink.scala
index dad1a7f0fe..3a739aa563 100644
--- a/core/src/main/scala/spark/metrics/sink/Sink.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/sink/Sink.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.metrics.sink
+package org.apache.spark.metrics.sink
trait Sink {
def start: Unit
def stop: Unit
-}
\ No newline at end of file
+}
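Since Sink is just start/stop plus the (Properties, MetricRegistry) constructor convention visible in ConsoleSink above, a custom sink is small. A sketch using the same metrics-core ConsoleReporter API:

    import java.util.Properties
    import java.util.concurrent.TimeUnit
    import com.codahale.metrics.{ConsoleReporter, MetricRegistry}

    class MinimalSink(val property: Properties, val registry: MetricRegistry) extends Sink {
      private val reporter = ConsoleReporter.forRegistry(registry)
        .convertRatesTo(TimeUnit.SECONDS)
        .build()
      def start: Unit = reporter.start(10, TimeUnit.SECONDS)  // report every 10s
      def stop: Unit = reporter.stop()
    }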
diff --git a/core/src/main/scala/spark/metrics/source/JvmSource.scala b/core/src/main/scala/org/apache/spark/metrics/source/JvmSource.scala
index e771008557..75cb2b8973 100644
--- a/core/src/main/scala/spark/metrics/source/JvmSource.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/source/JvmSource.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.metrics.source
+package org.apache.spark.metrics.source
import com.codahale.metrics.MetricRegistry
import com.codahale.metrics.jvm.{GarbageCollectorMetricSet, MemoryUsageGaugeSet}
diff --git a/core/src/main/scala/spark/metrics/source/Source.scala b/core/src/main/scala/org/apache/spark/metrics/source/Source.scala
index 76199a004b..3fee55cc6d 100644
--- a/core/src/main/scala/spark/metrics/source/Source.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/source/Source.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.metrics.source
+package org.apache.spark.metrics.source
import com.codahale.metrics.MetricRegistry
diff --git a/core/src/main/scala/spark/network/BufferMessage.scala b/core/src/main/scala/org/apache/spark/network/BufferMessage.scala
index e566aeac13..f736bb3713 100644
--- a/core/src/main/scala/spark/network/BufferMessage.scala
+++ b/core/src/main/scala/org/apache/spark/network/BufferMessage.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.network
+package org.apache.spark.network
import java.nio.ByteBuffer
import scala.collection.mutable.ArrayBuffer
-import spark.storage.BlockManager
+import org.apache.spark.storage.BlockManager
private[spark]
diff --git a/core/src/main/scala/spark/network/Connection.scala b/core/src/main/scala/org/apache/spark/network/Connection.scala
index 1e571d39ae..95cb0206ac 100644
--- a/core/src/main/scala/spark/network/Connection.scala
+++ b/core/src/main/scala/org/apache/spark/network/Connection.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.network
+package org.apache.spark.network
-import spark._
+import org.apache.spark._
import scala.collection.mutable.{HashMap, Queue, ArrayBuffer}
diff --git a/core/src/main/scala/spark/network/ConnectionManager.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
index 8b9f3ae18c..9e2233c07b 100644
--- a/core/src/main/scala/spark/network/ConnectionManager.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.network
+package org.apache.spark.network
-import spark._
+import org.apache.spark._
import java.nio._
import java.nio.channels._
diff --git a/core/src/main/scala/spark/network/ConnectionManagerId.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
index 9d5c518293..0839c011b8 100644
--- a/core/src/main/scala/spark/network/ConnectionManagerId.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.network
+package org.apache.spark.network
import java.net.InetSocketAddress
-import spark.Utils
+import org.apache.spark.Utils
private[spark] case class ConnectionManagerId(host: String, port: Int) {
diff --git a/core/src/main/scala/spark/network/ConnectionManagerTest.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
index 9e3827aaf5..8d9ad9604d 100644
--- a/core/src/main/scala/spark/network/ConnectionManagerTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.network
+package org.apache.spark.network
-import spark._
-import spark.SparkContext._
+import org.apache.spark._
+import org.apache.spark.SparkContext._
import scala.io.Source
diff --git a/core/src/main/scala/spark/network/Message.scala b/core/src/main/scala/org/apache/spark/network/Message.scala
index a25457ea35..f2ecc6d439 100644
--- a/core/src/main/scala/spark/network/Message.scala
+++ b/core/src/main/scala/org/apache/spark/network/Message.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.network
+package org.apache.spark.network
import java.nio.ByteBuffer
import java.net.InetSocketAddress
diff --git a/core/src/main/scala/spark/network/MessageChunk.scala b/core/src/main/scala/org/apache/spark/network/MessageChunk.scala
index 784db5ab62..e0fe57b80d 100644
--- a/core/src/main/scala/spark/network/MessageChunk.scala
+++ b/core/src/main/scala/org/apache/spark/network/MessageChunk.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.network
+package org.apache.spark.network
import java.nio.ByteBuffer
diff --git a/core/src/main/scala/spark/network/MessageChunkHeader.scala b/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
index 18d0cbcc14..235fbc39b3 100644
--- a/core/src/main/scala/spark/network/MessageChunkHeader.scala
+++ b/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.network
+package org.apache.spark.network
import java.net.InetAddress
import java.net.InetSocketAddress
diff --git a/core/src/main/scala/spark/network/ReceiverTest.scala b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
index 2bbc736f40..781715108b 100644
--- a/core/src/main/scala/spark/network/ReceiverTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.network
+package org.apache.spark.network
import java.nio.ByteBuffer
import java.net.InetAddress
diff --git a/core/src/main/scala/spark/network/SenderTest.scala b/core/src/main/scala/org/apache/spark/network/SenderTest.scala
index 542c54c36b..777574980f 100644
--- a/core/src/main/scala/spark/network/SenderTest.scala
+++ b/core/src/main/scala/org/apache/spark/network/SenderTest.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.network
+package org.apache.spark.network
import java.nio.ByteBuffer
import java.net.InetAddress
diff --git a/core/src/main/scala/spark/network/netty/FileHeader.scala b/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
index bf46d32aa3..3c29700920 100644
--- a/core/src/main/scala/spark/network/netty/FileHeader.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.network.netty
+package org.apache.spark.network.netty
import io.netty.buffer._
-import spark.Logging
+import org.apache.spark.Logging
private[spark] class FileHeader (
val fileLen: Int,
diff --git a/core/src/main/scala/spark/network/netty/ShuffleCopier.scala b/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala
index b01f6369f6..9493ccffd9 100644
--- a/core/src/main/scala/spark/network/netty/ShuffleCopier.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.network.netty
+package org.apache.spark.network.netty
import java.util.concurrent.Executors
@@ -23,8 +23,8 @@ import io.netty.buffer.ByteBuf
import io.netty.channel.ChannelHandlerContext
import io.netty.util.CharsetUtil
-import spark.Logging
-import spark.network.ConnectionManagerId
+import org.apache.spark.Logging
+import org.apache.spark.network.ConnectionManagerId
import scala.collection.JavaConverters._
diff --git a/core/src/main/scala/spark/network/netty/ShuffleSender.scala b/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala
index cdf88b03a0..537f225469 100644
--- a/core/src/main/scala/spark/network/netty/ShuffleSender.scala
+++ b/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.network.netty
+package org.apache.spark.network.netty
import java.io.File
-import spark.Logging
+import org.apache.spark.Logging
private[spark] class ShuffleSender(portIn: Int, val pResolver: PathResolver) extends Logging {
diff --git a/core/src/main/scala/spark/package.scala b/core/src/main/scala/org/apache/spark/package.scala
index b244bfbf06..1126480689 100644
--- a/core/src/main/scala/spark/package.scala
+++ b/core/src/main/scala/org/apache/spark/package.scala
@@ -16,16 +16,16 @@
*/
/**
- * Core Spark functionality. [[spark.SparkContext]] serves as the main entry point to Spark, while
- * [[spark.RDD]] is the data type representing a distributed collection, and provides most
+ * Core Spark functionality. [[org.apache.spark.SparkContext]] serves as the main entry point to Spark, while
+ * [[org.apache.spark.RDD]] is the data type representing a distributed collection, and provides most
* parallel operations.
*
- * In addition, [[spark.PairRDDFunctions]] contains operations available only on RDDs of key-value
- * pairs, such as `groupByKey` and `join`; [[spark.DoubleRDDFunctions]] contains operations
- * available only on RDDs of Doubles; and [[spark.SequenceFileRDDFunctions]] contains operations
+ * In addition, [[org.apache.spark.PairRDDFunctions]] contains operations available only on RDDs of key-value
+ * pairs, such as `groupByKey` and `join`; [[org.apache.spark.DoubleRDDFunctions]] contains operations
+ * available only on RDDs of Doubles; and [[org.apache.spark.SequenceFileRDDFunctions]] contains operations
* available on RDDs that can be saved as SequenceFiles. These operations are automatically
 * available on any RDD of the right type (e.g. RDD[(Int, Int)]) through implicit conversions when
- * you `import spark.SparkContext._`.
+ * you `import org.apache.spark.SparkContext._`.
*/
package object spark {
// For package docs only
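A short sketch of the implicit-conversion pattern those package docs describe (a local SparkContext is assumed):

    import org.apache.spark.SparkContext
    import org.apache.spark.SparkContext._   // brings PairRDDFunctions into scope

    val sc = new SparkContext("local", "package-docs-example")
    val pairs = sc.parallelize(Seq(("a", 1), ("a", 2), ("b", 3)))
    // groupByKey exists only on RDDs of pairs, via implicit conversion.
    println(pairs.groupByKey().collect().mkString(", "))
    sc.stop()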
diff --git a/core/src/main/scala/spark/partial/ApproximateActionListener.scala b/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala
index 691d939150..c5d51bee50 100644
--- a/core/src/main/scala/spark/partial/ApproximateActionListener.scala
+++ b/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.partial
+package org.apache.spark.partial
-import spark._
-import spark.scheduler.JobListener
+import org.apache.spark._
+import org.apache.spark.scheduler.JobListener
/**
* A JobListener for an approximate single-result action, such as count() or non-parallel reduce().
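A sketch of the caller-facing side of such an action, assuming the countApprox API of released Spark: it returns a PartialResult whose estimate tightens as tasks finish:

    // Ask for a count, but accept a partial answer after 500 ms.
    val partial = sc.parallelize(1 to 1000000).countApprox(500L, 0.95)
    println(partial.initialValue)   // a BoundedDouble: mean with error bars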
diff --git a/core/src/main/scala/spark/partial/ApproximateEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/ApproximateEvaluator.scala
index 5eae144dfb..9c2859c8b9 100644
--- a/core/src/main/scala/spark/partial/ApproximateEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/ApproximateEvaluator.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.partial
+package org.apache.spark.partial
/**
* An object that computes a function incrementally by merging in results of type U from multiple
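A sketch of the evaluator contract that comment describes; the member names are an assumption based on how the concrete evaluators below are driven:

    private[spark] trait ApproximateEvaluator[U, R] {
      def merge(outputId: Int, taskResult: U): Unit  // fold in one task's output
      def currentResult(): R                         // best estimate so far
    }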
diff --git a/core/src/main/scala/spark/partial/BoundedDouble.scala b/core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala
index 8bdbe6c012..5f4450859c 100644
--- a/core/src/main/scala/spark/partial/BoundedDouble.scala
+++ b/core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.partial
+package org.apache.spark.partial
/**
* A Double with error bars on it.
diff --git a/core/src/main/scala/spark/partial/CountEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/CountEvaluator.scala
index 6aa92094eb..3155dfe165 100644
--- a/core/src/main/scala/spark/partial/CountEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/CountEvaluator.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.partial
+package org.apache.spark.partial
import cern.jet.stat.Probability
diff --git a/core/src/main/scala/spark/partial/GroupedCountEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala
index ebe2e5a1e3..e519e3a548 100644
--- a/core/src/main/scala/spark/partial/GroupedCountEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.partial
+package org.apache.spark.partial
import java.util.{HashMap => JHashMap}
import java.util.{Map => JMap}
diff --git a/core/src/main/scala/spark/partial/GroupedMeanEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala
index 2dadbbd5fb..cf8a5680b6 100644
--- a/core/src/main/scala/spark/partial/GroupedMeanEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.partial
+package org.apache.spark.partial
import java.util.{HashMap => JHashMap}
import java.util.{Map => JMap}
@@ -24,7 +24,7 @@ import scala.collection.mutable.HashMap
import scala.collection.Map
import scala.collection.JavaConversions.mapAsScalaMap
-import spark.util.StatCounter
+import org.apache.spark.util.StatCounter
/**
* An ApproximateEvaluator for means by key. Returns a map of key to confidence interval.
diff --git a/core/src/main/scala/spark/partial/GroupedSumEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala
index ae2b63f7cb..8225a5d933 100644
--- a/core/src/main/scala/spark/partial/GroupedSumEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.partial
+package org.apache.spark.partial
import java.util.{HashMap => JHashMap}
import java.util.{Map => JMap}
@@ -24,7 +24,7 @@ import scala.collection.mutable.HashMap
import scala.collection.Map
import scala.collection.JavaConversions.mapAsScalaMap
-import spark.util.StatCounter
+import org.apache.spark.util.StatCounter
/**
* An ApproximateEvaluator for sums by key. Returns a map of key to confidence interval.
diff --git a/core/src/main/scala/spark/partial/MeanEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/MeanEvaluator.scala
index 5ddcad7075..d24959cba8 100644
--- a/core/src/main/scala/spark/partial/MeanEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/MeanEvaluator.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.partial
+package org.apache.spark.partial
import cern.jet.stat.Probability
-import spark.util.StatCounter
+import org.apache.spark.util.StatCounter
/**
* An ApproximateEvaluator for means.
diff --git a/core/src/main/scala/spark/partial/PartialResult.scala b/core/src/main/scala/org/apache/spark/partial/PartialResult.scala
index 922a9f9bc6..5ce49b8100 100644
--- a/core/src/main/scala/spark/partial/PartialResult.scala
+++ b/core/src/main/scala/org/apache/spark/partial/PartialResult.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.partial
+package org.apache.spark.partial
class PartialResult[R](initialVal: R, isFinal: Boolean) {
private var finalValue: Option[R] = if (isFinal) Some(initialVal) else None
diff --git a/core/src/main/scala/spark/partial/StudentTCacher.scala b/core/src/main/scala/org/apache/spark/partial/StudentTCacher.scala
index f3bb987d46..92915ee66d 100644
--- a/core/src/main/scala/spark/partial/StudentTCacher.scala
+++ b/core/src/main/scala/org/apache/spark/partial/StudentTCacher.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.partial
+package org.apache.spark.partial
import cern.jet.stat.Probability
diff --git a/core/src/main/scala/spark/partial/SumEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/SumEvaluator.scala
index 4083abef03..a74f800944 100644
--- a/core/src/main/scala/spark/partial/SumEvaluator.scala
+++ b/core/src/main/scala/org/apache/spark/partial/SumEvaluator.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.partial
+package org.apache.spark.partial
import cern.jet.stat.Probability
-import spark.util.StatCounter
+import org.apache.spark.util.StatCounter
/**
 * An ApproximateEvaluator for sums. It estimates the mean and the count and multiplies them
diff --git a/core/src/main/scala/spark/rdd/BlockRDD.scala b/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala
index 03800584ae..4bb01efa86 100644
--- a/core/src/main/scala/spark/rdd/BlockRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{RDD, SparkContext, SparkEnv, Partition, TaskContext}
-import spark.storage.BlockManager
+import org.apache.spark.{RDD, SparkContext, SparkEnv, Partition, TaskContext}
+import org.apache.spark.storage.BlockManager
private[spark] class BlockRDDPartition(val blockId: String, idx: Int) extends Partition {
val index = idx
diff --git a/core/src/main/scala/spark/rdd/CartesianRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
index 91b3e69d6f..9b0c882481 100644
--- a/core/src/main/scala/spark/rdd/CartesianRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
import java.io.{ObjectOutputStream, IOException}
-import spark._
+import org.apache.spark._
private[spark]
diff --git a/core/src/main/scala/spark/rdd/CheckpointRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala
index 1ad5fe6539..3311757189 100644
--- a/core/src/main/scala/spark/rdd/CheckpointRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark._
+import org.apache.spark._
import org.apache.hadoop.mapred.{FileInputFormat, SequenceFileInputFormat, JobConf, Reporter}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.{NullWritable, BytesWritable}
@@ -138,7 +138,7 @@ private[spark] object CheckpointRDD extends Logging {
// each split file having multiple blocks. This needs to be run on a
// cluster (mesos or standalone) using HDFS.
def main(args: Array[String]) {
- import spark._
+ import org.apache.spark._
val Array(cluster, hdfsPath) = args
val env = SparkEnv.get
diff --git a/core/src/main/scala/spark/rdd/CoGroupedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
index 01b6c23dcc..dcc35e8d0e 100644
--- a/core/src/main/scala/spark/rdd/CoGroupedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
import java.io.{ObjectOutputStream, IOException}
import java.util.{HashMap => JHashMap}
@@ -23,8 +23,8 @@ import java.util.{HashMap => JHashMap}
import scala.collection.JavaConversions
import scala.collection.mutable.ArrayBuffer
-import spark.{Partition, Partitioner, RDD, SparkEnv, TaskContext}
-import spark.{Dependency, OneToOneDependency, ShuffleDependency}
+import org.apache.spark.{Partition, Partitioner, RDD, SparkEnv, TaskContext}
+import org.apache.spark.{Dependency, OneToOneDependency, ShuffleDependency}
private[spark] sealed trait CoGroupSplitDep extends Serializable
diff --git a/core/src/main/scala/spark/rdd/CoalescedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala
index e612d026b2..c5de6362a9 100644
--- a/core/src/main/scala/spark/rdd/CoalescedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark._
+import org.apache.spark._
import java.io.{ObjectOutputStream, IOException}
import scala.collection.mutable
import scala.Some
diff --git a/core/src/main/scala/spark/rdd/EmptyRDD.scala b/core/src/main/scala/org/apache/spark/rdd/EmptyRDD.scala
index d7d4db5d30..24ce4abbc4 100644
--- a/core/src/main/scala/spark/rdd/EmptyRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/EmptyRDD.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{RDD, SparkContext, SparkEnv, Partition, TaskContext}
+import org.apache.spark.{RDD, SparkContext, SparkEnv, Partition, TaskContext}
/**
diff --git a/core/src/main/scala/spark/rdd/FilteredRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala
index 783508cfd1..4df8ceb58b 100644
--- a/core/src/main/scala/spark/rdd/FilteredRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{OneToOneDependency, RDD, Partition, TaskContext}
+import org.apache.spark.{OneToOneDependency, RDD, Partition, TaskContext}
private[spark] class FilteredRDD[T: ClassManifest](
prev: RDD[T],
diff --git a/core/src/main/scala/spark/rdd/FlatMappedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala
index ed75eac3ff..2bf7653af1 100644
--- a/core/src/main/scala/spark/rdd/FlatMappedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{RDD, Partition, TaskContext}
+import org.apache.spark.{RDD, Partition, TaskContext}
private[spark]
diff --git a/core/src/main/scala/spark/rdd/FlatMappedValuesRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala
index a6bdce89d8..e544720b05 100644
--- a/core/src/main/scala/spark/rdd/FlatMappedValuesRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{TaskContext, Partition, RDD}
+import org.apache.spark.{TaskContext, Partition, RDD}
private[spark]
diff --git a/core/src/main/scala/spark/rdd/GlommedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala
index 1573f8a289..2ce94199f2 100644
--- a/core/src/main/scala/spark/rdd/GlommedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{RDD, Partition, TaskContext}
+import org.apache.spark.{RDD, Partition, TaskContext}
private[spark] class GlommedRDD[T: ClassManifest](prev: RDD[T])
extends RDD[Array[T]](prev) {
diff --git a/core/src/main/scala/spark/rdd/HadoopRDD.scala b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
index e512423fd6..08e6154bb9 100644
--- a/core/src/main/scala/spark/rdd/HadoopRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
import java.io.EOFException
import java.util.NoSuchElementException
@@ -32,8 +32,8 @@ import org.apache.hadoop.mapred.RecordReader
import org.apache.hadoop.mapred.Reporter
import org.apache.hadoop.util.ReflectionUtils
-import spark.{Dependency, Logging, Partition, RDD, SerializableWritable, SparkContext, SparkEnv, TaskContext}
-import spark.util.NextIterator
+import org.apache.spark.{Dependency, Logging, Partition, RDD, SerializableWritable, SparkContext, SparkEnv, TaskContext}
+import org.apache.spark.util.NextIterator
import org.apache.hadoop.conf.{Configuration, Configurable}
diff --git a/core/src/main/scala/spark/rdd/JdbcRDD.scala b/core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala
index 59132437d2..3db460b3ce 100644
--- a/core/src/main/scala/spark/rdd/JdbcRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/JdbcRDD.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
import java.sql.{Connection, ResultSet}
-import spark.{Logging, Partition, RDD, SparkContext, TaskContext}
-import spark.util.NextIterator
+import org.apache.spark.{Logging, Partition, RDD, SparkContext, TaskContext}
+import org.apache.spark.util.NextIterator
private[spark] class JdbcPartition(idx: Int, val lower: Long, val upper: Long) extends Partition {
override def index = idx
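Each JdbcPartition carries a [lower, upper] bound pair, so the query is expected to contain two ? placeholders that the RDD fills in per partition. A hypothetical usage sketch, with the constructor shape assumed from released Spark:

    import java.sql.DriverManager
    import org.apache.spark.rdd.JdbcRDD

    val rows = new JdbcRDD(
      sc,  // an existing SparkContext
      () => DriverManager.getConnection("jdbc:h2:mem:example"),
      "SELECT id, name FROM people WHERE id >= ? AND id <= ?",
      1, 100, 3,  // lowerBound, upperBound, numPartitions
      rs => (rs.getLong(1), rs.getString(2)))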
diff --git a/core/src/main/scala/spark/rdd/MapPartitionsRDD.scala b/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala
index af8f0a112f..13009d3e17 100644
--- a/core/src/main/scala/spark/rdd/MapPartitionsRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{RDD, Partition, TaskContext}
+import org.apache.spark.{RDD, Partition, TaskContext}
private[spark]
diff --git a/core/src/main/scala/spark/rdd/MapPartitionsWithIndexRDD.scala b/core/src/main/scala/org/apache/spark/rdd/MapPartitionsWithIndexRDD.scala
index 3b4e9518fd..1683050b86 100644
--- a/core/src/main/scala/spark/rdd/MapPartitionsWithIndexRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/MapPartitionsWithIndexRDD.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{RDD, Partition, TaskContext}
+import org.apache.spark.{RDD, Partition, TaskContext}
/**
diff --git a/core/src/main/scala/spark/rdd/MappedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/MappedRDD.scala
index 8b411dd85d..26d4806edb 100644
--- a/core/src/main/scala/spark/rdd/MappedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/MappedRDD.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{RDD, Partition, TaskContext}
+import org.apache.spark.{RDD, Partition, TaskContext}
private[spark]
class MappedRDD[U: ClassManifest, T: ClassManifest](prev: RDD[T], f: T => U)
diff --git a/core/src/main/scala/spark/rdd/MappedValuesRDD.scala b/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala
index 8334e3b557..a405e9acdd 100644
--- a/core/src/main/scala/spark/rdd/MappedValuesRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{TaskContext, Partition, RDD}
+import org.apache.spark.{TaskContext, Partition, RDD}
private[spark]
class MappedValuesRDD[K, V, U](prev: RDD[_ <: Product2[K, V]], f: V => U)
diff --git a/core/src/main/scala/spark/rdd/NewHadoopRDD.scala b/core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala
index b1877dc06e..114b504486 100644
--- a/core/src/main/scala/spark/rdd/NewHadoopRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
import java.text.SimpleDateFormat
import java.util.Date
@@ -24,7 +24,7 @@ import org.apache.hadoop.conf.{Configurable, Configuration}
import org.apache.hadoop.io.Writable
import org.apache.hadoop.mapreduce._
-import spark.{Dependency, Logging, Partition, RDD, SerializableWritable, SparkContext, TaskContext}
+import org.apache.spark.{Dependency, Logging, Partition, RDD, SerializableWritable, SparkContext, TaskContext}
private[spark]
diff --git a/core/src/main/scala/spark/rdd/OrderedRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/OrderedRDDFunctions.scala
index 9154b76035..4c3df0eaf4 100644
--- a/core/src/main/scala/spark/rdd/OrderedRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/OrderedRDDFunctions.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{RangePartitioner, Logging, RDD}
+import org.apache.spark.{RangePartitioner, Logging, RDD}
/**
* Extra functions available on RDDs of (key, value) pairs where the key is sortable through
diff --git a/core/src/main/scala/spark/rdd/ParallelCollectionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
index 33079cd539..8db3611054 100644
--- a/core/src/main/scala/spark/rdd/ParallelCollectionRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
import scala.collection.immutable.NumericRange
import scala.collection.mutable.ArrayBuffer
import scala.collection.Map
-import spark._
+import org.apache.spark._
import java.io._
import scala.Serializable
diff --git a/core/src/main/scala/spark/rdd/PartitionPruningRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
index d8700becb0..8e79a5c874 100644
--- a/core/src/main/scala/spark/rdd/PartitionPruningRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{NarrowDependency, RDD, SparkEnv, Partition, TaskContext}
+import org.apache.spark.{NarrowDependency, RDD, SparkEnv, Partition, TaskContext}
class PartitionPruningRDDPartition(idx: Int, val parentSplit: Partition) extends Partition {
diff --git a/core/src/main/scala/spark/rdd/PipedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala
index 2cefdc78b0..98498d5ddf 100644
--- a/core/src/main/scala/spark/rdd/PipedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
import java.io.PrintWriter
import java.util.StringTokenizer
@@ -25,8 +25,8 @@ import scala.collection.JavaConversions._
import scala.collection.mutable.ArrayBuffer
import scala.io.Source
-import spark.{RDD, SparkEnv, Partition, TaskContext}
-import spark.broadcast.Broadcast
+import org.apache.spark.{RDD, SparkEnv, Partition, TaskContext}
+import org.apache.spark.broadcast.Broadcast
/**
diff --git a/core/src/main/scala/spark/rdd/SampledRDD.scala b/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala
index 574c9b141d..1e8d89e912 100644
--- a/core/src/main/scala/spark/rdd/SampledRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
import java.util.Random
import cern.jet.random.Poisson
import cern.jet.random.engine.DRand
-import spark.{RDD, Partition, TaskContext}
+import org.apache.spark.{RDD, Partition, TaskContext}
private[spark]
class SampledRDDPartition(val prev: Partition, val seed: Int) extends Partition with Serializable {
diff --git a/core/src/main/scala/spark/rdd/ShuffledRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala
index 51c05af064..f0e9ab8b80 100644
--- a/core/src/main/scala/spark/rdd/ShuffledRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{Dependency, Partitioner, RDD, SparkEnv, ShuffleDependency, Partition, TaskContext}
+import org.apache.spark.{Dependency, Partitioner, RDD, SparkEnv, ShuffleDependency, Partition, TaskContext}
private[spark] class ShuffledRDDPartition(val idx: Int) extends Partition {
diff --git a/core/src/main/scala/spark/rdd/SubtractedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala
index dadef5e17d..7369dfaa74 100644
--- a/core/src/main/scala/spark/rdd/SubtractedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala
@@ -15,19 +15,19 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
import java.util.{HashMap => JHashMap}
import scala.collection.JavaConversions._
import scala.collection.mutable.ArrayBuffer
-import spark.RDD
-import spark.Partitioner
-import spark.Dependency
-import spark.TaskContext
-import spark.Partition
-import spark.SparkEnv
-import spark.ShuffleDependency
-import spark.OneToOneDependency
+import org.apache.spark.RDD
+import org.apache.spark.Partitioner
+import org.apache.spark.Dependency
+import org.apache.spark.TaskContext
+import org.apache.spark.Partition
+import org.apache.spark.SparkEnv
+import org.apache.spark.ShuffleDependency
+import org.apache.spark.OneToOneDependency
/**
diff --git a/core/src/main/scala/spark/rdd/UnionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala
index 2776826f18..fd02476b62 100644
--- a/core/src/main/scala/spark/rdd/UnionRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
import scala.collection.mutable.ArrayBuffer
-import spark.{Dependency, RangeDependency, RDD, SparkContext, Partition, TaskContext}
+import org.apache.spark.{Dependency, RangeDependency, RDD, SparkContext, Partition, TaskContext}
import java.io.{ObjectOutputStream, IOException}
private[spark] class UnionPartition[T: ClassManifest](idx: Int, rdd: RDD[T], splitIndex: Int)
diff --git a/core/src/main/scala/spark/rdd/ZippedPartitionsRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
index 9a0831bd89..5ae1db3e67 100644
--- a/core/src/main/scala/spark/rdd/ZippedPartitionsRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{Utils, OneToOneDependency, RDD, SparkContext, Partition, TaskContext}
+import org.apache.spark.{Utils, OneToOneDependency, RDD, SparkContext, Partition, TaskContext}
import java.io.{ObjectOutputStream, IOException}
private[spark] class ZippedPartitionsPartition(
diff --git a/core/src/main/scala/spark/rdd/ZippedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala
index 4074e50e44..3bd00d291b 100644
--- a/core/src/main/scala/spark/rdd/ZippedRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
-import spark.{Utils, OneToOneDependency, RDD, SparkContext, Partition, TaskContext}
+import org.apache.spark.{Utils, OneToOneDependency, RDD, SparkContext, Partition, TaskContext}
import java.io.{ObjectOutputStream, IOException}
diff --git a/core/src/main/scala/spark/scheduler/ActiveJob.scala b/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
index fecc3e9648..0b04607d01 100644
--- a/core/src/main/scala/spark/scheduler/ActiveJob.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
-import spark.TaskContext
+import org.apache.spark.TaskContext
import java.util.Properties
diff --git a/core/src/main/scala/spark/scheduler/DAGScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
index 7275bd346a..5ac700bbf4 100644
--- a/core/src/main/scala/spark/scheduler/DAGScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import java.io.NotSerializableException
import java.util.Properties
@@ -24,12 +24,12 @@ import java.util.concurrent.atomic.AtomicInteger
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet, Map}
-import spark._
-import spark.executor.TaskMetrics
-import spark.partial.{ApproximateActionListener, ApproximateEvaluator, PartialResult}
-import spark.scheduler.cluster.TaskInfo
-import spark.storage.{BlockManager, BlockManagerMaster}
-import spark.util.{MetadataCleaner, TimeStampedHashMap}
+import org.apache.spark._
+import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.partial.{ApproximateActionListener, ApproximateEvaluator, PartialResult}
+import org.apache.spark.scheduler.cluster.TaskInfo
+import org.apache.spark.storage.{BlockManager, BlockManagerMaster}
+import org.apache.spark.util.{MetadataCleaner, TimeStampedHashMap}
/**
* The high-level scheduling layer that implements stage-oriented scheduling. It computes a DAG of
diff --git a/core/src/main/scala/spark/scheduler/DAGSchedulerEvent.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
index b8ba0e9239..5b07933eed 100644
--- a/core/src/main/scala/spark/scheduler/DAGSchedulerEvent.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import java.util.Properties
-import spark.scheduler.cluster.TaskInfo
+import org.apache.spark.scheduler.cluster.TaskInfo
import scala.collection.mutable.Map
-import spark._
-import spark.executor.TaskMetrics
+import org.apache.spark._
+import org.apache.spark.executor.TaskMetrics
/**
* Types of events that can be handled by the DAGScheduler. The DAGScheduler uses an event queue
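As a rough illustration of the event-queue design this comment mentions, a self-contained sketch follows; the event names are hypothetical, not the real DAGSchedulerEvent cases.

```scala
import java.util.concurrent.LinkedBlockingQueue

sealed trait SchedulerEvent
case class JobSubmitted(jobId: Int) extends SchedulerEvent
case object StopScheduler extends SchedulerEvent

object EventQueueSketch {
  private val eventQueue = new LinkedBlockingQueue[SchedulerEvent]()

  def post(event: SchedulerEvent): Unit = eventQueue.put(event)

  def main(args: Array[String]): Unit = {
    // One consumer thread drains and handles events posted by producers.
    val consumer = new Thread(new Runnable {
      def run(): Unit = {
        var running = true
        while (running) eventQueue.take() match {
          case JobSubmitted(id) => println("handling job " + id)
          case StopScheduler    => running = false
        }
      }
    })
    consumer.start()
    post(JobSubmitted(1)); post(JobSubmitted(2)); post(StopScheduler)
    consumer.join()
  }
}
```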
diff --git a/core/src/main/scala/spark/scheduler/DAGSchedulerSource.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala
index 98c4fb7e59..ce0dc9093d 100644
--- a/core/src/main/scala/spark/scheduler/DAGSchedulerSource.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala
@@ -1,8 +1,8 @@
-package spark.scheduler
+package org.apache.spark.scheduler
import com.codahale.metrics.{Gauge,MetricRegistry}
-import spark.metrics.source.Source
+import org.apache.spark.metrics.source.Source
private[spark] class DAGSchedulerSource(val dagScheduler: DAGScheduler) extends Source {
val metricRegistry = new MetricRegistry()
diff --git a/core/src/main/scala/spark/scheduler/InputFormatInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala
index 8f1b9b29b5..370ccd183c 100644
--- a/core/src/main/scala/spark/scheduler/InputFormatInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
-import spark.{Logging, SparkEnv}
+import org.apache.spark.{Logging, SparkEnv}
import scala.collection.immutable.Set
import org.apache.hadoop.mapred.{FileInputFormat, JobConf}
import org.apache.hadoop.security.UserGroupInformation
diff --git a/core/src/main/scala/spark/scheduler/JobListener.scala b/core/src/main/scala/org/apache/spark/scheduler/JobListener.scala
index af108b8fec..50c2b9acd6 100644
--- a/core/src/main/scala/spark/scheduler/JobListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/JobListener.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
/**
* Interface used to listen for job completion or failure events after submitting a job to the
diff --git a/core/src/main/scala/spark/scheduler/JobLogger.scala b/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
index 1bc9fabdff..98ef4d1e63 100644
--- a/core/src/main/scala/spark/scheduler/JobLogger.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import java.io.PrintWriter
import java.io.File
@@ -27,9 +27,9 @@ import java.util.concurrent.LinkedBlockingQueue
import scala.collection.mutable.{Map, HashMap, ListBuffer}
import scala.io.Source
-import spark._
-import spark.executor.TaskMetrics
-import spark.scheduler.cluster.TaskInfo
+import org.apache.spark._
+import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.scheduler.cluster.TaskInfo
// Used to record runtime information for each job, including the RDD graph,
// tasks' start/stop and shuffle information, and information from outside

diff --git a/core/src/main/scala/spark/scheduler/JobResult.scala b/core/src/main/scala/org/apache/spark/scheduler/JobResult.scala
index a61b335152..c381348a8d 100644
--- a/core/src/main/scala/spark/scheduler/JobResult.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/JobResult.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
/**
* A result of a job in the DAGScheduler.
diff --git a/core/src/main/scala/spark/scheduler/JobWaiter.scala b/core/src/main/scala/org/apache/spark/scheduler/JobWaiter.scala
index 69cd161c1f..200d881799 100644
--- a/core/src/main/scala/spark/scheduler/JobWaiter.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/JobWaiter.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import scala.collection.mutable.ArrayBuffer
diff --git a/core/src/main/scala/spark/scheduler/MapStatus.scala b/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala
index 2f6a68ee85..1c61687f28 100644
--- a/core/src/main/scala/spark/scheduler/MapStatus.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
-import spark.storage.BlockManagerId
+import org.apache.spark.storage.BlockManagerId
import java.io.{ObjectOutput, ObjectInput, Externalizable}
/**
diff --git a/core/src/main/scala/spark/scheduler/ResultTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
index d066df5dc1..2f157ccdd2 100644
--- a/core/src/main/scala/spark/scheduler/ResultTask.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
-import spark._
+import org.apache.spark._
import java.io._
import util.{MetadataCleaner, TimeStampedHashMap}
import java.util.zip.{GZIPInputStream, GZIPOutputStream}
diff --git a/core/src/main/scala/spark/scheduler/ShuffleMapTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
index f2a038576b..ca716b44e8 100644
--- a/core/src/main/scala/spark/scheduler/ShuffleMapTask.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala
@@ -15,17 +15,17 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import java.io._
import java.util.zip.{GZIPInputStream, GZIPOutputStream}
import scala.collection.mutable.HashMap
-import spark._
-import spark.executor.ShuffleWriteMetrics
-import spark.storage._
-import spark.util.{TimeStampedHashMap, MetadataCleaner}
+import org.apache.spark._
+import org.apache.spark.executor.ShuffleWriteMetrics
+import org.apache.spark.storage._
+import org.apache.spark.util.{TimeStampedHashMap, MetadataCleaner}
private[spark] object ShuffleMapTask {
diff --git a/core/src/main/scala/spark/scheduler/SparkListener.scala b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
index e5531011c2..3504424fa9 100644
--- a/core/src/main/scala/spark/scheduler/SparkListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import java.util.Properties
-import spark.scheduler.cluster.TaskInfo
-import spark.util.Distribution
-import spark.{Logging, SparkContext, TaskEndReason, Utils}
-import spark.executor.TaskMetrics
+import org.apache.spark.scheduler.cluster.TaskInfo
+import org.apache.spark.util.Distribution
+import org.apache.spark.{Logging, SparkContext, TaskEndReason, Utils}
+import org.apache.spark.executor.TaskMetrics
sealed trait SparkListenerEvents
@@ -79,7 +79,7 @@ trait SparkListener {
*/
class StatsReportListener extends SparkListener with Logging {
override def onStageCompleted(stageCompleted: StageCompleted) {
- import spark.scheduler.StatsReportListener._
+ import org.apache.spark.scheduler.StatsReportListener._
implicit val sc = stageCompleted
this.logInfo("Finished stage: " + stageCompleted.stageInfo)
showMillisDistribution("task runtime:", (info, _) => Some(info.duration))
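For third-party code, the practical impact of this rename is the import path: a listener like StatsReportListener above now extends the trait from its new package. A hedged sketch of a user-defined listener, mirroring the shape shown in the hunk (how the listener gets registered with the SparkContext is not shown here and is left out):

```scala
import org.apache.spark.scheduler.{SparkListener, StageCompleted}

// Minimal listener against the renamed package.
class StageTimingListener extends SparkListener {
  override def onStageCompleted(stageCompleted: StageCompleted) {
    println("Finished stage: " + stageCompleted.stageInfo)
  }
}
```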
diff --git a/core/src/main/scala/spark/scheduler/SparkListenerBus.scala b/core/src/main/scala/org/apache/spark/scheduler/SparkListenerBus.scala
index f55ed455ed..a65e1ecd6d 100644
--- a/core/src/main/scala/spark/scheduler/SparkListenerBus.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SparkListenerBus.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import java.util.concurrent.LinkedBlockingQueue
import scala.collection.mutable.{ArrayBuffer, SynchronizedBuffer}
-import spark.Logging
+import org.apache.spark.Logging
/** Asynchronously passes SparkListenerEvents to registered SparkListeners. */
private[spark] class SparkListenerBus() extends Logging {
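The imports in this hunk (a LinkedBlockingQueue plus a synchronized buffer of listeners) outline the whole design. A generic sketch of such a bus, not the actual class:

```scala
import java.util.concurrent.LinkedBlockingQueue
import scala.collection.mutable.ArrayBuffer

class ListenerBusSketch[E] {
  private val listeners  = new ArrayBuffer[E => Unit]()
  private val eventQueue = new LinkedBlockingQueue[E]()

  // One daemon thread drains the queue so posting an event never blocks
  // the caller on slow listeners.
  private val deliveryThread = new Thread(new Runnable {
    def run(): Unit = while (true) {
      val event = eventQueue.take()
      listeners.synchronized { listeners.foreach(l => l(event)) }
    }
  })
  deliveryThread.setDaemon(true)
  deliveryThread.start()

  def addListener(l: E => Unit): Unit = listeners.synchronized { listeners += l }
  def post(event: E): Unit = eventQueue.put(event)
}
```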
diff --git a/core/src/main/scala/spark/scheduler/SplitInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/SplitInfo.scala
index 4e3661ec5d..5b40a3eb29 100644
--- a/core/src/main/scala/spark/scheduler/SplitInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/SplitInfo.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import collection.mutable.ArrayBuffer
diff --git a/core/src/main/scala/spark/scheduler/Stage.scala b/core/src/main/scala/org/apache/spark/scheduler/Stage.scala
index c599c00ac4..87b1fe4e0c 100644
--- a/core/src/main/scala/spark/scheduler/Stage.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/Stage.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import java.net.URI
-import spark._
-import spark.storage.BlockManagerId
+import org.apache.spark._
+import org.apache.spark.storage.BlockManagerId
/**
* A stage is a set of independent tasks all computing the same function that need to run as part
diff --git a/core/src/main/scala/spark/scheduler/StageInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/StageInfo.scala
index c4026f995a..72cb1c9ce8 100644
--- a/core/src/main/scala/spark/scheduler/StageInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/StageInfo.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
-import spark.scheduler.cluster.TaskInfo
+import org.apache.spark.scheduler.cluster.TaskInfo
import scala.collection._
-import spark.executor.TaskMetrics
+import org.apache.spark.executor.TaskMetrics
case class StageInfo(
val stage: Stage,
diff --git a/core/src/main/scala/spark/scheduler/Task.scala b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
index 0ab2ae6cfe..598d91752a 100644
--- a/core/src/main/scala/spark/scheduler/Task.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/Task.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
-import spark.serializer.SerializerInstance
+import org.apache.spark.serializer.SerializerInstance
import java.io.{DataInputStream, DataOutputStream}
import java.nio.ByteBuffer
import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream
-import spark.util.ByteBufferInputStream
+import org.apache.spark.util.ByteBufferInputStream
import scala.collection.mutable.HashMap
-import spark.executor.TaskMetrics
+import org.apache.spark.executor.TaskMetrics
/**
* A task to execute on a worker node.
diff --git a/core/src/main/scala/spark/scheduler/TaskLocation.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskLocation.scala
index fea117e956..67c9a6760b 100644
--- a/core/src/main/scala/spark/scheduler/TaskLocation.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskLocation.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
/**
* A location where a task should run. This can either be a host or a (host, executorID) pair.
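The comment captures the whole data model; as a hedged sketch (the field names are assumptions, not the real class):

```scala
// Either a bare host, or a host pinned to a specific executor.
case class TaskLocationSketch(host: String, executorId: Option[String] = None)

object TaskLocationSketch {
  val hostOnly        = TaskLocationSketch("worker-7")
  val hostAndExecutor = TaskLocationSketch("worker-7", Some("executor-3"))
}
```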
diff --git a/core/src/main/scala/spark/scheduler/TaskResult.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala
index fc4856756b..776675d28c 100644
--- a/core/src/main/scala/spark/scheduler/TaskResult.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import java.io._
import scala.collection.mutable.Map
-import spark.executor.TaskMetrics
-import spark.{Utils, SparkEnv}
+import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.{Utils, SparkEnv}
import java.nio.ByteBuffer
// Task result. Also contains updates to accumulator variables.
diff --git a/core/src/main/scala/spark/scheduler/TaskScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
index 4943d58e25..63be8ba3f5 100644
--- a/core/src/main/scala/spark/scheduler/TaskScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskScheduler.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
-import spark.scheduler.cluster.Pool
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark.scheduler.cluster.Pool
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
/**
* Low-level task scheduler interface, implemented by both ClusterScheduler and LocalScheduler.
* These schedulers get sets of tasks submitted to them from the DAGScheduler for each stage,
diff --git a/core/src/main/scala/spark/scheduler/TaskSchedulerListener.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerListener.scala
index 64be50b2d0..83be051c1a 100644
--- a/core/src/main/scala/spark/scheduler/TaskSchedulerListener.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerListener.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
-import spark.scheduler.cluster.TaskInfo
+import org.apache.spark.scheduler.cluster.TaskInfo
import scala.collection.mutable.Map
-import spark.TaskEndReason
-import spark.executor.TaskMetrics
+import org.apache.spark.TaskEndReason
+import org.apache.spark.executor.TaskMetrics
/**
* Interface for getting events back from the TaskScheduler.
diff --git a/core/src/main/scala/spark/scheduler/TaskSet.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSet.scala
index dc3550dd0b..c3ad325156 100644
--- a/core/src/main/scala/spark/scheduler/TaskSet.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSet.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import java.util.Properties
diff --git a/core/src/main/scala/spark/scheduler/cluster/ClusterScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/ClusterScheduler.scala
index 679d899b47..3196ab5022 100644
--- a/core/src/main/scala/spark/scheduler/cluster/ClusterScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/ClusterScheduler.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
import java.lang.{Boolean => JBoolean}
@@ -23,10 +23,10 @@ import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
-import spark._
-import spark.TaskState.TaskState
-import spark.scheduler._
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark._
+import org.apache.spark.TaskState.TaskState
+import org.apache.spark.scheduler._
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
import java.nio.ByteBuffer
import java.util.concurrent.atomic.AtomicLong
import java.util.{TimerTask, Timer}
diff --git a/core/src/main/scala/spark/scheduler/cluster/ClusterTaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManager.scala
index a4d6880abb..a33307b83a 100644
--- a/core/src/main/scala/spark/scheduler/cluster/ClusterTaskSetManager.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManager.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
import java.nio.ByteBuffer
import java.util.{Arrays, NoSuchElementException}
@@ -26,15 +26,15 @@ import scala.collection.mutable.HashSet
import scala.math.max
import scala.math.min
-import spark.{FetchFailed, Logging, Resubmitted, SparkEnv, Success, TaskEndReason, TaskState, Utils}
-import spark.{ExceptionFailure, SparkException, TaskResultTooBigFailure}
-import spark.TaskState.TaskState
-import spark.scheduler._
+import org.apache.spark.{FetchFailed, Logging, Resubmitted, SparkEnv, Success, TaskEndReason, TaskState, Utils}
+import org.apache.spark.{ExceptionFailure, SparkException, TaskResultTooBigFailure}
+import org.apache.spark.TaskState.TaskState
+import org.apache.spark.scheduler._
import scala.Some
-import spark.FetchFailed
-import spark.ExceptionFailure
-import spark.TaskResultTooBigFailure
-import spark.util.{SystemClock, Clock}
+import org.apache.spark.FetchFailed
+import org.apache.spark.ExceptionFailure
+import org.apache.spark.TaskResultTooBigFailure
+import org.apache.spark.util.{SystemClock, Clock}
/**
diff --git a/core/src/main/scala/spark/scheduler/cluster/ExecutorLossReason.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/ExecutorLossReason.scala
index 8825f2dd24..5077b2b48b 100644
--- a/core/src/main/scala/spark/scheduler/cluster/ExecutorLossReason.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/ExecutorLossReason.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
-import spark.executor.ExecutorExitCode
+import org.apache.spark.executor.ExecutorExitCode
/**
* Represents an explanation for an executor or whole slave failing or exiting.
diff --git a/core/src/main/scala/spark/scheduler/cluster/Pool.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/Pool.scala
index 83708f07e1..35b32600da 100644
--- a/core/src/main/scala/spark/scheduler/cluster/Pool.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/Pool.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
-import spark.Logging
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark.Logging
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
/**
* A Schedulable entity that represents a collection of Pools or TaskSetManagers
diff --git a/core/src/main/scala/spark/scheduler/cluster/Schedulable.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/Schedulable.scala
index e77e8e4162..f4726450ec 100644
--- a/core/src/main/scala/spark/scheduler/cluster/Schedulable.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/Schedulable.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
import scala.collection.mutable.ArrayBuffer
/**
diff --git a/core/src/main/scala/spark/scheduler/cluster/SchedulableBuilder.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulableBuilder.scala
index 2fc8a76a05..d04eeb6b98 100644
--- a/core/src/main/scala/spark/scheduler/cluster/SchedulableBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulableBuilder.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
import java.io.{File, FileInputStream, FileOutputStream, FileNotFoundException}
import java.util.Properties
import scala.xml.XML
-import spark.Logging
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark.Logging
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
/**
diff --git a/core/src/main/scala/spark/scheduler/cluster/SchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulerBackend.scala
index 4431744ec3..bde2f73df4 100644
--- a/core/src/main/scala/spark/scheduler/cluster/SchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulerBackend.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
-import spark.{SparkContext, Utils}
+import org.apache.spark.{SparkContext, Utils}
/**
* A backend interface for cluster scheduling systems that allows plugging in different ones under
diff --git a/core/src/main/scala/spark/scheduler/cluster/SchedulingAlgorithm.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulingAlgorithm.scala
index 69e0ac2a6b..cbeed4731a 100644
--- a/core/src/main/scala/spark/scheduler/cluster/SchedulingAlgorithm.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulingAlgorithm.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
/**
* An interface for a sorting algorithm
diff --git a/core/src/main/scala/spark/scheduler/cluster/SchedulingMode.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulingMode.scala
index 55cdf4791f..34811389a0 100644
--- a/core/src/main/scala/spark/scheduler/cluster/SchedulingMode.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SchedulingMode.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
/**
* "FAIR" and "FIFO" determines which policy is used
diff --git a/core/src/main/scala/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
index 7ac574bdc8..ac6dc7d879 100644
--- a/core/src/main/scala/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
-import spark.{Utils, Logging, SparkContext}
-import spark.deploy.client.{Client, ClientListener}
-import spark.deploy.{Command, ApplicationDescription}
+import org.apache.spark.{Utils, Logging, SparkContext}
+import org.apache.spark.deploy.client.{Client, ClientListener}
+import org.apache.spark.deploy.{Command, ApplicationDescription}
import scala.collection.mutable.HashMap
private[spark] class SparkDeploySchedulerBackend(
@@ -45,7 +45,8 @@ private[spark] class SparkDeploySchedulerBackend(
System.getProperty("spark.driver.host"), System.getProperty("spark.driver.port"),
StandaloneSchedulerBackend.ACTOR_NAME)
val args = Seq(driverUrl, "{{EXECUTOR_ID}}", "{{HOSTNAME}}", "{{CORES}}")
- val command = Command("spark.executor.StandaloneExecutorBackend", args, sc.executorEnvs)
+ val command = Command(
+ "org.apache.spark.executor.StandaloneExecutorBackend", args, sc.executorEnvs)
val sparkHome = sc.getSparkHome().getOrElse(null)
val appDesc = new ApplicationDescription(appName, maxCores, executorMemory, command, sparkHome,
sc.ui.appUIAddress)
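This is one of the few hunks that is more than a package line: the executor class travels to the worker as a string literal, so the literal had to be renamed by hand. A hedged sketch of the alternative, deriving the name from the class object so the compiler tracks renames (this assumes the class is visible at compile time, which Spark may deliberately avoid here to keep modules decoupled):

```scala
object ClassNameSketch {
  // Stand-in for org.apache.spark.executor.StandaloneExecutorBackend.
  class StandaloneExecutorBackend

  def main(args: Array[String]): Unit = {
    // getName returns the fully-qualified name, so a package move shows up
    // as a compile error instead of hiding inside a string literal.
    println(classOf[StandaloneExecutorBackend].getName)
  }
}
```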
diff --git a/core/src/main/scala/spark/scheduler/cluster/StandaloneClusterMessage.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneClusterMessage.scala
index 05c29eb72f..1cc5daf673 100644
--- a/core/src/main/scala/spark/scheduler/cluster/StandaloneClusterMessage.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneClusterMessage.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
import java.nio.ByteBuffer
-import spark.TaskState.TaskState
-import spark.Utils
-import spark.util.SerializableBuffer
+import org.apache.spark.TaskState.TaskState
+import org.apache.spark.Utils
+import org.apache.spark.util.SerializableBuffer
private[spark] sealed trait StandaloneClusterMessage extends Serializable
diff --git a/core/src/main/scala/spark/scheduler/cluster/StandaloneSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
index 3203be1029..3677a827e0 100644
--- a/core/src/main/scala/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
import java.util.concurrent.atomic.AtomicInteger
@@ -28,8 +28,8 @@ import akka.remote.{RemoteClientShutdown, RemoteClientDisconnected, RemoteClient
import akka.util.Duration
import akka.util.duration._
-import spark.{Utils, SparkException, Logging, TaskState}
-import spark.scheduler.cluster.StandaloneClusterMessages._
+import org.apache.spark.{Utils, SparkException, Logging, TaskState}
+import org.apache.spark.scheduler.cluster.StandaloneClusterMessages._
/**
* A standalone scheduler backend, which waits for standalone executors to connect to it through
diff --git a/core/src/main/scala/spark/scheduler/cluster/TaskDescription.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskDescription.scala
index 187553233f..309ac2f6c9 100644
--- a/core/src/main/scala/spark/scheduler/cluster/TaskDescription.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskDescription.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
import java.nio.ByteBuffer
-import spark.util.SerializableBuffer
+import org.apache.spark.util.SerializableBuffer
private[spark] class TaskDescription(
val taskId: Long,
diff --git a/core/src/main/scala/spark/scheduler/cluster/TaskInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskInfo.scala
index c2c5522686..7ce14be7fb 100644
--- a/core/src/main/scala/spark/scheduler/cluster/TaskInfo.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskInfo.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
-import spark.Utils
+import org.apache.spark.Utils
/**
* Information about a running task attempt inside a TaskSet.
diff --git a/core/src/main/scala/spark/scheduler/cluster/TaskLocality.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskLocality.scala
index 1c33e41f87..5d4130e14a 100644
--- a/core/src/main/scala/spark/scheduler/cluster/TaskLocality.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskLocality.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
private[spark] object TaskLocality
diff --git a/core/src/main/scala/spark/scheduler/cluster/TaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskSetManager.scala
index 0248830b7a..648a3ef922 100644
--- a/core/src/main/scala/spark/scheduler/cluster/TaskSetManager.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/TaskSetManager.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
import java.nio.ByteBuffer
-import spark.TaskState.TaskState
-import spark.scheduler.TaskSet
+import org.apache.spark.TaskState.TaskState
+import org.apache.spark.scheduler.TaskSet
/**
* Tracks and schedules the tasks within a single TaskSet. This class keeps track of the status of
diff --git a/core/src/main/scala/spark/scheduler/cluster/WorkerOffer.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/WorkerOffer.scala
index 1d09bd9b03..938f62883a 100644
--- a/core/src/main/scala/spark/scheduler/cluster/WorkerOffer.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/WorkerOffer.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
/**
* Represents free resources available on an executor.
diff --git a/core/src/main/scala/spark/scheduler/local/LocalScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/local/LocalScheduler.scala
index 5be4dbd9f0..f0ebe66d82 100644
--- a/core/src/main/scala/spark/scheduler/local/LocalScheduler.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/local/LocalScheduler.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler.local
+package org.apache.spark.scheduler.local
import java.io.File
import java.lang.management.ManagementFactory
@@ -27,12 +27,12 @@ import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
-import spark._
-import spark.TaskState.TaskState
-import spark.executor.ExecutorURLClassLoader
-import spark.scheduler._
-import spark.scheduler.cluster._
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark._
+import org.apache.spark.TaskState.TaskState
+import org.apache.spark.executor.ExecutorURLClassLoader
+import org.apache.spark.scheduler._
+import org.apache.spark.scheduler.cluster._
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
import akka.actor._
/**
diff --git a/core/src/main/scala/spark/scheduler/local/LocalTaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/local/LocalTaskSetManager.scala
index e237f289e3..e52cb998bd 100644
--- a/core/src/main/scala/spark/scheduler/local/LocalTaskSetManager.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/local/LocalTaskSetManager.scala
@@ -15,16 +15,16 @@
* limitations under the License.
*/
-package spark.scheduler.local
+package org.apache.spark.scheduler.local
import java.nio.ByteBuffer
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
-import spark.{ExceptionFailure, Logging, SparkEnv, Success, TaskState}
-import spark.TaskState.TaskState
-import spark.scheduler.{Task, TaskResult, TaskSet}
-import spark.scheduler.cluster.{Schedulable, TaskDescription, TaskInfo, TaskLocality, TaskSetManager}
+import org.apache.spark.{ExceptionFailure, Logging, SparkEnv, Success, TaskState}
+import org.apache.spark.TaskState.TaskState
+import org.apache.spark.scheduler.{Task, TaskResult, TaskSet}
+import org.apache.spark.scheduler.cluster.{Schedulable, TaskDescription, TaskInfo, TaskLocality, TaskSetManager}
private[spark] class LocalTaskSetManager(sched: LocalScheduler, val taskSet: TaskSet)
diff --git a/core/src/main/scala/spark/scheduler/mesos/CoarseMesosSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/mesos/CoarseMesosSchedulerBackend.scala
index eef3ee1425..f6a2feab28 100644
--- a/core/src/main/scala/spark/scheduler/mesos/CoarseMesosSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/mesos/CoarseMesosSchedulerBackend.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler.mesos
+package org.apache.spark.scheduler.mesos
import com.google.protobuf.ByteString
@@ -23,14 +23,14 @@ import org.apache.mesos.{Scheduler => MScheduler}
import org.apache.mesos._
import org.apache.mesos.Protos.{TaskInfo => MesosTaskInfo, TaskState => MesosTaskState, _}
-import spark.{SparkException, Utils, Logging, SparkContext}
+import org.apache.spark.{SparkException, Utils, Logging, SparkContext}
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import scala.collection.JavaConversions._
import java.io.File
-import spark.scheduler.cluster._
+import org.apache.spark.scheduler.cluster._
import java.util.{ArrayList => JArrayList, List => JList}
import java.util.Collections
-import spark.TaskState
+import org.apache.spark.TaskState
/**
* A SchedulerBackend that runs tasks on Mesos, but uses "coarse-grained" tasks, where it holds
@@ -126,14 +126,16 @@ private[spark] class CoarseMesosSchedulerBackend(
val uri = System.getProperty("spark.executor.uri")
if (uri == null) {
val runScript = new File(sparkHome, "spark-class").getCanonicalPath
- command.setValue("\"%s\" spark.executor.StandaloneExecutorBackend %s %s %s %d".format(
- runScript, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
+ command.setValue(
+ "\"%s\" org.apache.spark.executor.StandaloneExecutorBackend %s %s %s %d".format(
+ runScript, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
} else {
// Grab everything to the first '.'. We'll use that and '*' to
// glob the directory "correctly".
val basename = uri.split('/').last.split('.').head
- command.setValue("cd %s*; ./spark-class spark.executor.StandaloneExecutorBackend %s %s %s %d".format(
- basename, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
+ command.setValue(
+ "cd %s*; ./spark-class org.apache.spark.executor.StandaloneExecutorBackend %s %s %s %d".format(
+ basename, driverUrl, offer.getSlaveId.getValue, offer.getHostname, numCores))
command.addUris(CommandInfo.URI.newBuilder().setValue(uri))
}
return command.build()
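The "grab everything to the first '.'" comment is easiest to see with a concrete input; a small worked example of the basename logic (the URI is made up):

```scala
object BasenameSketch {
  // Last path segment, then everything before its first '.'.
  def basename(uri: String): String = uri.split('/').last.split('.').head

  def main(args: Array[String]): Unit = {
    println(basename("http://host/dist/spark-0.8.0-incubating.tgz"))
    // prints "spark-0"; the generated "cd spark-0*" then globs the
    // directory Mesos unpacked from the archive
  }
}
```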
diff --git a/core/src/main/scala/spark/scheduler/mesos/MesosSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/mesos/MesosSchedulerBackend.scala
index f6069a5775..e002af1742 100644
--- a/core/src/main/scala/spark/scheduler/mesos/MesosSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/mesos/MesosSchedulerBackend.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.scheduler.mesos
+package org.apache.spark.scheduler.mesos
import com.google.protobuf.ByteString
@@ -23,14 +23,14 @@ import org.apache.mesos.{Scheduler => MScheduler}
import org.apache.mesos._
import org.apache.mesos.Protos.{TaskInfo => MesosTaskInfo, TaskState => MesosTaskState, _}
-import spark.{SparkException, Utils, Logging, SparkContext}
+import org.apache.spark.{SparkException, Utils, Logging, SparkContext}
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import scala.collection.JavaConversions._
import java.io.File
-import spark.scheduler.cluster._
+import org.apache.spark.scheduler.cluster._
import java.util.{ArrayList => JArrayList, List => JList}
import java.util.Collections
-import spark.TaskState
+import org.apache.spark.TaskState
/**
* A SchedulerBackend for running fine-grained tasks on Mesos. Each Spark task is mapped to a
diff --git a/core/src/main/scala/spark/serializer/Serializer.scala b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
index dc94d42bb6..160cca4d6c 100644
--- a/core/src/main/scala/spark/serializer/Serializer.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala
@@ -15,19 +15,19 @@
* limitations under the License.
*/
-package spark.serializer
+package org.apache.spark.serializer
import java.io.{EOFException, InputStream, OutputStream}
import java.nio.ByteBuffer
import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream
-import spark.util.ByteBufferInputStream
+import org.apache.spark.util.{NextIterator, ByteBufferInputStream}
/**
* A serializer. Because some serialization libraries are not thread safe, this class is used to
- * create [[spark.serializer.SerializerInstance]] objects that do the actual serialization and are
+ * create [[org.apache.spark.serializer.SerializerInstance]] objects that do the actual serialization and are
* guaranteed to only be called from one thread at a time.
*/
trait Serializer {
@@ -95,7 +95,7 @@ trait DeserializationStream {
* Read the elements of this stream through an iterator. This can only be called once, as
* reading each element will consume data from the input source.
*/
- def asIterator: Iterator[Any] = new spark.util.NextIterator[Any] {
+ def asIterator: Iterator[Any] = new NextIterator[Any] {
override protected def getNext() = {
try {
readObject[Any]()
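The asIterator change drops the fully-qualified `spark.util.NextIterator` in favour of the import added above. For readers unfamiliar with the class, a simplified sketch of the pattern (not the exact implementation): subclasses produce one element per getNext() call and set a flag on end-of-stream, which adapts an EOF-terminated source to Scala's Iterator.

```scala
abstract class NextIteratorSketch[U] extends Iterator[U] {
  protected var finished = false
  private var gotNext = false
  private var nextValue: U = _

  // Subclasses return the next element, or set finished = true (for
  // example on EOFException) when the underlying stream is exhausted.
  protected def getNext(): U

  def hasNext: Boolean = {
    if (!gotNext && !finished) {
      nextValue = getNext()
      gotNext = true
    }
    !finished
  }

  def next(): U = {
    if (!hasNext) throw new NoSuchElementException("End of stream")
    gotNext = false
    nextValue
  }
}
```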
diff --git a/core/src/main/scala/spark/serializer/SerializerManager.scala b/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala
index b7b24705a2..2955986fec 100644
--- a/core/src/main/scala/spark/serializer/SerializerManager.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.serializer
+package org.apache.spark.serializer
import java.util.concurrent.ConcurrentHashMap
diff --git a/core/src/main/scala/spark/storage/BlockException.scala b/core/src/main/scala/org/apache/spark/storage/BlockException.scala
index 8ebfaf3cbf..290dbce4f5 100644
--- a/core/src/main/scala/spark/storage/BlockException.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockException.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
private[spark]
case class BlockException(blockId: String, message: String) extends Exception(message)
diff --git a/core/src/main/scala/spark/storage/BlockFetchTracker.scala b/core/src/main/scala/org/apache/spark/storage/BlockFetchTracker.scala
index 265e554ad8..2e0b0e6eda 100644
--- a/core/src/main/scala/spark/storage/BlockFetchTracker.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockFetchTracker.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
private[spark] trait BlockFetchTracker {
def totalBlocks : Int
diff --git a/core/src/main/scala/spark/storage/BlockFetcherIterator.scala b/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala
index 568783d893..c91f0fc1ad 100644
--- a/core/src/main/scala/spark/storage/BlockFetcherIterator.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.nio.ByteBuffer
import java.util.concurrent.LinkedBlockingQueue
@@ -26,13 +26,13 @@ import scala.collection.mutable.Queue
import io.netty.buffer.ByteBuf
-import spark.Logging
-import spark.Utils
-import spark.SparkException
-import spark.network.BufferMessage
-import spark.network.ConnectionManagerId
-import spark.network.netty.ShuffleCopier
-import spark.serializer.Serializer
+import org.apache.spark.Logging
+import org.apache.spark.Utils
+import org.apache.spark.SparkException
+import org.apache.spark.network.BufferMessage
+import org.apache.spark.network.ConnectionManagerId
+import org.apache.spark.network.netty.ShuffleCopier
+import org.apache.spark.serializer.Serializer
/**
diff --git a/core/src/main/scala/spark/storage/BlockManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
index 2a6ec2a55d..3299ac98d5 100644
--- a/core/src/main/scala/spark/storage/BlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.io.{InputStream, OutputStream}
import java.nio.{ByteBuffer, MappedByteBuffer}
@@ -29,11 +29,11 @@ import akka.util.duration._
import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream
-import spark.{Logging, SparkEnv, SparkException, Utils}
-import spark.io.CompressionCodec
-import spark.network._
-import spark.serializer.Serializer
-import spark.util.{ByteBufferInputStream, IdGenerator, MetadataCleaner, TimeStampedHashMap}
+import org.apache.spark.{Logging, SparkEnv, SparkException, Utils}
+import org.apache.spark.io.CompressionCodec
+import org.apache.spark.network._
+import org.apache.spark.serializer.Serializer
+import org.apache.spark.util.{ByteBufferInputStream, IdGenerator, MetadataCleaner, TimeStampedHashMap}
import sun.nio.ch.DirectBuffer
diff --git a/core/src/main/scala/spark/storage/BlockManagerId.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
index b36a6176c0..a22a80decc 100644
--- a/core/src/main/scala/spark/storage/BlockManagerId.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.io.{Externalizable, IOException, ObjectInput, ObjectOutput}
import java.util.concurrent.ConcurrentHashMap
-import spark.Utils
+import org.apache.spark.Utils
/**
* This class represents a unique identifier for a BlockManager.
@@ -92,13 +92,13 @@ private[spark] class BlockManagerId private (
private[spark] object BlockManagerId {
/**
- * Returns a [[spark.storage.BlockManagerId]] for the given configuraiton.
+ * Returns a [[org.apache.spark.storage.BlockManagerId]] for the given configuration.
*
* @param execId ID of the executor.
* @param host Host name of the block manager.
* @param port Port of the block manager.
* @param nettyPort Optional port for the Netty-based shuffle sender.
- * @return A new [[spark.storage.BlockManagerId]].
+ * @return A new [[org.apache.spark.storage.BlockManagerId]].
*/
def apply(execId: String, host: String, port: Int, nettyPort: Int) =
getCachedBlockManagerId(new BlockManagerId(execId, host, port, nettyPort))
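The apply factory above routes every id through getCachedBlockManagerId, an interning cache. A generic sketch of that pattern, built on the ConcurrentHashMap this file already imports (the cache layout and key format are assumptions):

```scala
import java.util.concurrent.ConcurrentHashMap

class InternedId private (val execId: String, val host: String, val port: Int)

object InternedId {
  private val cache = new ConcurrentHashMap[String, InternedId]()

  // Return one canonical instance per distinct id, so deserialized copies
  // collapse to a single object and comparisons stay cheap.
  def apply(execId: String, host: String, port: Int): InternedId = {
    val key   = execId + ":" + host + ":" + port
    val fresh = new InternedId(execId, host, port)
    val prior = cache.putIfAbsent(key, fresh)
    if (prior == null) fresh else prior
  }
}
```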
diff --git a/core/src/main/scala/spark/storage/BlockManagerMaster.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
index 76128e8cff..cf463d6ffc 100644
--- a/core/src/main/scala/spark/storage/BlockManagerMaster.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import akka.actor.ActorRef
import akka.dispatch.{Await, Future}
import akka.pattern.ask
import akka.util.Duration
-import spark.{Logging, SparkException}
-import spark.storage.BlockManagerMessages._
+import org.apache.spark.{Logging, SparkException}
+import org.apache.spark.storage.BlockManagerMessages._
private[spark] class BlockManagerMaster(var driverActor: ActorRef) extends Logging {
diff --git a/core/src/main/scala/spark/storage/BlockManagerMasterActor.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala
index b7a981d101..baa4a1da50 100644
--- a/core/src/main/scala/spark/storage/BlockManagerMasterActor.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.util.{HashMap => JHashMap}
@@ -28,8 +28,8 @@ import akka.pattern.ask
import akka.util.Duration
import akka.util.duration._
-import spark.{Logging, Utils, SparkException}
-import spark.storage.BlockManagerMessages._
+import org.apache.spark.{Logging, Utils, SparkException}
+import org.apache.spark.storage.BlockManagerMessages._
/**
diff --git a/core/src/main/scala/spark/storage/BlockManagerMessages.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala
index 9375a9ca54..24333a179c 100644
--- a/core/src/main/scala/spark/storage/BlockManagerMessages.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.io.{Externalizable, ObjectInput, ObjectOutput}
diff --git a/core/src/main/scala/spark/storage/BlockManagerSlaveActor.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala
index 6e5fb43732..951503019f 100644
--- a/core/src/main/scala/spark/storage/BlockManagerSlaveActor.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import akka.actor.Actor
-import spark.storage.BlockManagerMessages._
+import org.apache.spark.storage.BlockManagerMessages._
/**
diff --git a/core/src/main/scala/spark/storage/BlockManagerSource.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala
index 2aecd1ea71..24190cdd67 100644
--- a/core/src/main/scala/spark/storage/BlockManagerSource.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala
@@ -1,8 +1,8 @@
-package spark.storage
+package org.apache.spark.storage
import com.codahale.metrics.{Gauge,MetricRegistry}
-import spark.metrics.source.Source
+import org.apache.spark.metrics.source.Source
private[spark] class BlockManagerSource(val blockManager: BlockManager) extends Source {
diff --git a/core/src/main/scala/spark/storage/BlockManagerWorker.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala
index 39064bce92..f4856020e5 100644
--- a/core/src/main/scala/spark/storage/BlockManagerWorker.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.nio.ByteBuffer
-import spark.{Logging, Utils}
-import spark.network._
+import org.apache.spark.{Logging, Utils}
+import org.apache.spark.network._
/**
* A network interface for BlockManager. Each slave should have one
diff --git a/core/src/main/scala/spark/storage/BlockMessage.scala b/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala
index bcce26b7c1..d8fa6a91d1 100644
--- a/core/src/main/scala/spark/storage/BlockMessage.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.nio.ByteBuffer
import scala.collection.mutable.StringBuilder
import scala.collection.mutable.ArrayBuffer
-import spark.network._
+import org.apache.spark.network._
private[spark] case class GetBlock(id: String)
private[spark] case class GotBlock(id: String, data: ByteBuffer)
diff --git a/core/src/main/scala/spark/storage/BlockMessageArray.scala b/core/src/main/scala/org/apache/spark/storage/BlockMessageArray.scala
index ee2fc167d5..0aaf846b5b 100644
--- a/core/src/main/scala/spark/storage/BlockMessageArray.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockMessageArray.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.nio.ByteBuffer
import scala.collection.mutable.ArrayBuffer
-import spark._
-import spark.network._
+import org.apache.spark._
+import org.apache.spark.network._
private[spark]
class BlockMessageArray(var blockMessages: Seq[BlockMessage]) extends Seq[BlockMessage] with Logging {
diff --git a/core/src/main/scala/spark/storage/BlockObjectWriter.scala b/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
index 3812009ca1..39f103297f 100644
--- a/core/src/main/scala/spark/storage/BlockObjectWriter.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
/**
diff --git a/core/src/main/scala/spark/storage/BlockStore.scala b/core/src/main/scala/org/apache/spark/storage/BlockStore.scala
index c8db0022b0..fa834371f4 100644
--- a/core/src/main/scala/spark/storage/BlockStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockStore.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.nio.ByteBuffer
import scala.collection.mutable.ArrayBuffer
-import spark.Logging
+import org.apache.spark.Logging
/**
* Abstract class to store blocks
diff --git a/core/src/main/scala/spark/storage/DiskStore.scala b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
index b14497157e..fd945e065c 100644
--- a/core/src/main/scala/spark/storage/DiskStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.io.{File, FileOutputStream, OutputStream, RandomAccessFile}
import java.nio.ByteBuffer
@@ -28,12 +28,12 @@ import scala.collection.mutable.ArrayBuffer
import it.unimi.dsi.fastutil.io.FastBufferedOutputStream
-import spark.Utils
-import spark.executor.ExecutorExitCode
-import spark.serializer.{Serializer, SerializationStream}
-import spark.Logging
-import spark.network.netty.ShuffleSender
-import spark.network.netty.PathResolver
+import org.apache.spark.Utils
+import org.apache.spark.executor.ExecutorExitCode
+import org.apache.spark.serializer.{Serializer, SerializationStream}
+import org.apache.spark.Logging
+import org.apache.spark.network.netty.ShuffleSender
+import org.apache.spark.network.netty.PathResolver
/**
diff --git a/core/src/main/scala/spark/storage/MemoryStore.scala b/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
index 5a51f5cf31..828dc0f22d 100644
--- a/core/src/main/scala/spark/storage/MemoryStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.util.LinkedHashMap
import java.util.concurrent.ArrayBlockingQueue
-import spark.{SizeEstimator, Utils}
+import org.apache.spark.{SizeEstimator, Utils}
import java.nio.ByteBuffer
import collection.mutable.ArrayBuffer
diff --git a/core/src/main/scala/spark/storage/PutResult.scala b/core/src/main/scala/org/apache/spark/storage/PutResult.scala
index 3a0974fe15..2eba2f06b5 100644
--- a/core/src/main/scala/spark/storage/PutResult.scala
+++ b/core/src/main/scala/org/apache/spark/storage/PutResult.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.nio.ByteBuffer
diff --git a/core/src/main/scala/spark/storage/ShuffleBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
index 8a7a6f9ed3..9da11efb57 100644
--- a/core/src/main/scala/spark/storage/ShuffleBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/ShuffleBlockManager.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
-import spark.serializer.Serializer
+import org.apache.spark.serializer.Serializer
private[spark]
diff --git a/core/src/main/scala/spark/storage/StorageLevel.scala b/core/src/main/scala/org/apache/spark/storage/StorageLevel.scala
index f52650988c..755f1a760e 100644
--- a/core/src/main/scala/spark/storage/StorageLevel.scala
+++ b/core/src/main/scala/org/apache/spark/storage/StorageLevel.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.io.{Externalizable, IOException, ObjectInput, ObjectOutput}
@@ -23,7 +23,7 @@ import java.io.{Externalizable, IOException, ObjectInput, ObjectOutput}
* Flags for controlling the storage of an RDD. Each StorageLevel records whether to use memory,
* whether to drop the RDD to disk if it falls out of memory, whether to keep the data in memory
* in a serialized format, and whether to replicate the RDD partitions on multiple nodes.
- * The [[spark.storage.StorageLevel$]] singleton object contains some static constants for
+ * The [[org.apache.spark.storage.StorageLevel$]] singleton object contains some static constants for
* commonly useful storage levels. To create your own storage level object, use the factory method
* of the singleton object (`StorageLevel(...)`).
*/
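A hedged usage sketch for the factory method the scaladoc points at; the parameter names and order are assumptions based on the flags the comment names (disk, memory, serialized form) plus a replication count, not a confirmed signature:

```scala
import org.apache.spark.storage.StorageLevel

object StorageLevelSketch {
  // Memory first, spill to disk, keep bytes serialized, two replicas.
  // Parameter names here are assumptions.
  val memoryAndDiskReplicated =
    StorageLevel(useDisk = true, useMemory = true, deserialized = false, 2)
}
```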
diff --git a/core/src/main/scala/spark/storage/StorageUtils.scala b/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
index 123b8f6345..0bba1dac54 100644
--- a/core/src/main/scala/spark/storage/StorageUtils.scala
+++ b/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
-import spark.{Utils, SparkContext}
+import org.apache.spark.{Utils, SparkContext}
import BlockManagerMasterActor.BlockStatus
private[spark]
diff --git a/core/src/main/scala/spark/storage/ThreadingTest.scala b/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala
index b3ab1ff4b4..1d5afe9b08 100644
--- a/core/src/main/scala/spark/storage/ThreadingTest.scala
+++ b/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import akka.actor._
-import spark.KryoSerializer
+import org.apache.spark.KryoSerializer
import java.util.concurrent.ArrayBlockingQueue
import util.Random
diff --git a/core/src/main/scala/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index f66fe39905..cfa18f6ea4 100644
--- a/core/src/main/scala/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.ui
+package org.apache.spark.ui
import javax.servlet.http.{HttpServletResponse, HttpServletRequest}
@@ -29,7 +29,7 @@ import org.eclipse.jetty.server.{Server, Request, Handler}
import org.eclipse.jetty.server.handler.{ResourceHandler, HandlerList, ContextHandler, AbstractHandler}
import org.eclipse.jetty.util.thread.QueuedThreadPool
-import spark.Logging
+import org.apache.spark.Logging
/** Utilities for launching a web server using Jetty's HTTP Server class */
diff --git a/core/src/main/scala/spark/ui/Page.scala b/core/src/main/scala/org/apache/spark/ui/Page.scala
index 87376a19d8..b2a069a375 100644
--- a/core/src/main/scala/spark/ui/Page.scala
+++ b/core/src/main/scala/org/apache/spark/ui/Page.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.ui
+package org.apache.spark.ui
private[spark] object Page extends Enumeration {
val Stages, Storage, Environment, Executors = Value
diff --git a/core/src/main/scala/spark/ui/SparkUI.scala b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
index 23ded44ba3..4688effe0a 100644
--- a/core/src/main/scala/spark/ui/SparkUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -15,18 +15,18 @@
* limitations under the License.
*/
-package spark.ui
+package org.apache.spark.ui
import javax.servlet.http.HttpServletRequest
import org.eclipse.jetty.server.{Handler, Server}
-import spark.{Logging, SparkContext, SparkEnv, Utils}
-import spark.ui.env.EnvironmentUI
-import spark.ui.exec.ExecutorsUI
-import spark.ui.storage.BlockManagerUI
-import spark.ui.jobs.JobProgressUI
-import spark.ui.JettyUtils._
+import org.apache.spark.{Logging, SparkContext, SparkEnv, Utils}
+import org.apache.spark.ui.env.EnvironmentUI
+import org.apache.spark.ui.exec.ExecutorsUI
+import org.apache.spark.ui.storage.BlockManagerUI
+import org.apache.spark.ui.jobs.JobProgressUI
+import org.apache.spark.ui.JettyUtils._
/** Top level user interface for Spark */
private[spark] class SparkUI(sc: SparkContext) extends Logging {
@@ -83,5 +83,5 @@ private[spark] class SparkUI(sc: SparkContext) extends Logging {
private[spark] object SparkUI {
val DEFAULT_PORT = "3030"
- val STATIC_RESOURCE_DIR = "spark/ui/static"
+ val STATIC_RESOURCE_DIR = "org/apache/spark/ui/static"
}
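STATIC_RESOURCE_DIR matters because the files under it moved on the classpath along with the sources, so anything resolving UI assets must use the new path. A quick sketch (the file name is a placeholder, not taken from this diff):

```scala
object StaticResourceSketch {
  private val StaticResourceDir = "org/apache/spark/ui/static"

  def main(args: Array[String]): Unit = {
    // Returns null when the resource is absent from the classpath.
    val url = getClass.getClassLoader.getResource(StaticResourceDir + "/example.css")
    println(Option(url).map(_.toString).getOrElse("resource not found"))
  }
}
```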
diff --git a/core/src/main/scala/spark/ui/UIUtils.scala b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
index 51bb18d888..ce1acf564c 100644
--- a/core/src/main/scala/spark/ui/UIUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIUtils.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.ui
+package org.apache.spark.ui
import scala.xml.Node
-import spark.SparkContext
+import org.apache.spark.SparkContext
/** Utility functions for generating XML pages with spark content. */
private[spark] object UIUtils {
diff --git a/core/src/main/scala/spark/ui/UIWorkloadGenerator.scala b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
index 5ff0572f0a..0ecb22d2f9 100644
--- a/core/src/main/scala/spark/ui/UIWorkloadGenerator.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.ui
+package org.apache.spark.ui
import scala.util.Random
-import spark.SparkContext
-import spark.SparkContext._
-import spark.scheduler.cluster.SchedulingMode
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.scheduler.cluster.SchedulingMode
/**
diff --git a/core/src/main/scala/spark/ui/env/EnvironmentUI.scala b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
index b1be1a27ef..c5bf2acc9e 100644
--- a/core/src/main/scala/spark/ui/env/EnvironmentUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.ui.env
+package org.apache.spark.ui.env
import javax.servlet.http.HttpServletRequest
@@ -25,10 +25,10 @@ import scala.xml.Node
import org.eclipse.jetty.server.Handler
-import spark.ui.JettyUtils._
-import spark.ui.UIUtils
-import spark.ui.Page.Environment
-import spark.SparkContext
+import org.apache.spark.ui.JettyUtils._
+import org.apache.spark.ui.UIUtils
+import org.apache.spark.ui.Page.Environment
+import org.apache.spark.SparkContext
private[spark] class EnvironmentUI(sc: SparkContext) {
diff --git a/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala
index 0a7021fbf8..efe6b474e0 100644
--- a/core/src/main/scala/spark/ui/exec/ExecutorsUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala
@@ -1,4 +1,4 @@
-package spark.ui.exec
+package org.apache.spark.ui.exec
import javax.servlet.http.HttpServletRequest
@@ -7,13 +7,13 @@ import scala.xml.Node
import org.eclipse.jetty.server.Handler
-import spark.{ExceptionFailure, Logging, Utils, SparkContext}
-import spark.executor.TaskMetrics
-import spark.scheduler.cluster.TaskInfo
-import spark.scheduler.{SparkListenerTaskStart, SparkListenerTaskEnd, SparkListener}
-import spark.ui.JettyUtils._
-import spark.ui.Page.Executors
-import spark.ui.UIUtils
+import org.apache.spark.{ExceptionFailure, Logging, Utils, SparkContext}
+import org.apache.spark.executor.TaskMetrics
+import org.apache.spark.scheduler.cluster.TaskInfo
+import org.apache.spark.scheduler.{SparkListenerTaskStart, SparkListenerTaskEnd, SparkListener}
+import org.apache.spark.ui.JettyUtils._
+import org.apache.spark.ui.Page.Executors
+import org.apache.spark.ui.UIUtils
private[spark] class ExecutorsUI(val sc: SparkContext) {
diff --git a/core/src/main/scala/spark/ui/jobs/IndexPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
index 8867a6c90c..3b428effaf 100644
--- a/core/src/main/scala/spark/ui/jobs/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.ui.jobs
+package org.apache.spark.ui.jobs
import javax.servlet.http.HttpServletRequest
import scala.xml.{NodeSeq, Node}
-import spark.scheduler.cluster.SchedulingMode
-import spark.ui.Page._
-import spark.ui.UIUtils._
+import org.apache.spark.scheduler.cluster.SchedulingMode
+import org.apache.spark.ui.Page._
+import org.apache.spark.ui.UIUtils._
/** Page showing list of all ongoing and recently finished stages and pools*/
diff --git a/core/src/main/scala/spark/ui/jobs/JobProgressListener.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
index 1d9767a83c..ae02226300 100644
--- a/core/src/main/scala/spark/ui/jobs/JobProgressListener.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
@@ -1,12 +1,12 @@
-package spark.ui.jobs
+package org.apache.spark.ui.jobs
import scala.Seq
import scala.collection.mutable.{ListBuffer, HashMap, HashSet}
-import spark.{ExceptionFailure, SparkContext, Success, Utils}
-import spark.scheduler._
-import spark.scheduler.cluster.TaskInfo
-import spark.executor.TaskMetrics
+import org.apache.spark.{ExceptionFailure, SparkContext, Success, Utils}
+import org.apache.spark.scheduler._
+import org.apache.spark.scheduler.cluster.TaskInfo
+import org.apache.spark.executor.TaskMetrics
import collection.mutable
/**
diff --git a/core/src/main/scala/spark/ui/jobs/JobProgressUI.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala
index c83f102ff3..1bb7638bd9 100644
--- a/core/src/main/scala/spark/ui/jobs/JobProgressUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.ui.jobs
+package org.apache.spark.ui.jobs
import akka.util.Duration
@@ -28,12 +28,12 @@ import org.eclipse.jetty.server.Handler
import scala.Seq
import scala.collection.mutable.{HashSet, ListBuffer, HashMap, ArrayBuffer}
-import spark.ui.JettyUtils._
-import spark.{ExceptionFailure, SparkContext, Success, Utils}
-import spark.scheduler._
+import org.apache.spark.ui.JettyUtils._
+import org.apache.spark.{ExceptionFailure, SparkContext, Success, Utils}
+import org.apache.spark.scheduler._
import collection.mutable
-import spark.scheduler.cluster.SchedulingMode
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark.scheduler.cluster.SchedulingMode
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
/** Web UI showing progress status of all jobs in the given SparkContext. */
private[spark] class JobProgressUI(val sc: SparkContext) {
diff --git a/core/src/main/scala/spark/ui/jobs/PoolPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
index 7fb74dce40..ce92b6932b 100644
--- a/core/src/main/scala/spark/ui/jobs/PoolPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala
@@ -1,13 +1,13 @@
-package spark.ui.jobs
+package org.apache.spark.ui.jobs
import javax.servlet.http.HttpServletRequest
import scala.xml.{NodeSeq, Node}
import scala.collection.mutable.HashSet
-import spark.scheduler.Stage
-import spark.ui.UIUtils._
-import spark.ui.Page._
+import org.apache.spark.scheduler.Stage
+import org.apache.spark.ui.UIUtils._
+import org.apache.spark.ui.Page._
/** Page showing specific pool details */
private[spark] class PoolPage(parent: JobProgressUI) {
diff --git a/core/src/main/scala/spark/ui/jobs/PoolTable.scala b/core/src/main/scala/org/apache/spark/ui/jobs/PoolTable.scala
index 621828f9c3..f31465e59d 100644
--- a/core/src/main/scala/spark/ui/jobs/PoolTable.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/PoolTable.scala
@@ -1,11 +1,11 @@
-package spark.ui.jobs
+package org.apache.spark.ui.jobs
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
import scala.xml.Node
-import spark.scheduler.Stage
-import spark.scheduler.cluster.Schedulable
+import org.apache.spark.scheduler.Stage
+import org.apache.spark.scheduler.cluster.Schedulable
/** Table showing list of pools */
private[spark] class PoolTable(pools: Seq[Schedulable], listener: JobProgressListener) {
diff --git a/core/src/main/scala/spark/ui/jobs/StagePage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
index c2341475c7..2fe85bc0cf 100644
--- a/core/src/main/scala/spark/ui/jobs/StagePage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.ui.jobs
+package org.apache.spark.ui.jobs
import java.util.Date
@@ -23,12 +23,12 @@ import javax.servlet.http.HttpServletRequest
import scala.xml.Node
-import spark.ui.UIUtils._
-import spark.ui.Page._
-import spark.util.Distribution
-import spark.{ExceptionFailure, Utils}
-import spark.scheduler.cluster.TaskInfo
-import spark.executor.TaskMetrics
+import org.apache.spark.ui.UIUtils._
+import org.apache.spark.ui.Page._
+import org.apache.spark.util.Distribution
+import org.apache.spark.{ExceptionFailure, Utils}
+import org.apache.spark.scheduler.cluster.TaskInfo
+import org.apache.spark.executor.TaskMetrics
/** Page showing statistics and task list for a given stage */
private[spark] class StagePage(parent: JobProgressUI) {
diff --git a/core/src/main/scala/spark/ui/jobs/StageTable.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala
index 2b1bc984fc..beb0574548 100644
--- a/core/src/main/scala/spark/ui/jobs/StageTable.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala
@@ -1,13 +1,13 @@
-package spark.ui.jobs
+package org.apache.spark.ui.jobs
import java.util.Date
import scala.xml.Node
import scala.collection.mutable.HashSet
-import spark.Utils
-import spark.scheduler.cluster.{SchedulingMode, TaskInfo}
-import spark.scheduler.Stage
+import org.apache.spark.Utils
+import org.apache.spark.scheduler.cluster.{SchedulingMode, TaskInfo}
+import org.apache.spark.scheduler.Stage
/** Page showing list of all ongoing and recently finished stages */
diff --git a/core/src/main/scala/spark/ui/storage/BlockManagerUI.scala b/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
index 49ed069c75..1d633d374a 100644
--- a/core/src/main/scala/spark/ui/storage/BlockManagerUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.ui.storage
+package org.apache.spark.ui.storage
import akka.util.Duration
@@ -23,8 +23,8 @@ import javax.servlet.http.HttpServletRequest
import org.eclipse.jetty.server.Handler
-import spark.{Logging, SparkContext}
-import spark.ui.JettyUtils._
+import org.apache.spark.{Logging, SparkContext}
+import org.apache.spark.ui.JettyUtils._
/** Web UI showing storage status of all RDD's in the given SparkContext. */
private[spark] class BlockManagerUI(val sc: SparkContext) extends Logging {
diff --git a/core/src/main/scala/spark/ui/storage/IndexPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala
index fc6273c694..1eb4a7a85e 100644
--- a/core/src/main/scala/spark/ui/storage/IndexPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala
@@ -15,16 +15,16 @@
* limitations under the License.
*/
-package spark.ui.storage
+package org.apache.spark.ui.storage
import javax.servlet.http.HttpServletRequest
import scala.xml.Node
-import spark.storage.{RDDInfo, StorageUtils}
-import spark.Utils
-import spark.ui.UIUtils._
-import spark.ui.Page._
+import org.apache.spark.storage.{RDDInfo, StorageUtils}
+import org.apache.spark.Utils
+import org.apache.spark.ui.UIUtils._
+import org.apache.spark.ui.Page._
/** Page showing list of RDD's currently stored in the cluster */
private[spark] class IndexPage(parent: BlockManagerUI) {
diff --git a/core/src/main/scala/spark/ui/storage/RDDPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
index b128a5614d..37baf17f7a 100644
--- a/core/src/main/scala/spark/ui/storage/RDDPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala
@@ -15,17 +15,17 @@
* limitations under the License.
*/
-package spark.ui.storage
+package org.apache.spark.ui.storage
import javax.servlet.http.HttpServletRequest
import scala.xml.Node
-import spark.Utils
-import spark.storage.{StorageStatus, StorageUtils}
-import spark.storage.BlockManagerMasterActor.BlockStatus
-import spark.ui.UIUtils._
-import spark.ui.Page._
+import org.apache.spark.Utils
+import org.apache.spark.storage.{StorageStatus, StorageUtils}
+import org.apache.spark.storage.BlockManagerMasterActor.BlockStatus
+import org.apache.spark.ui.UIUtils._
+import org.apache.spark.ui.Page._
/** Page showing storage details for a given RDD */
diff --git a/core/src/main/scala/spark/util/AkkaUtils.scala b/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala
index 9233277bdb..d4c5065c3f 100644
--- a/core/src/main/scala/spark/util/AkkaUtils.scala
+++ b/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import akka.actor.{ActorSystem, ExtendedActorSystem}
import com.typesafe.config.ConfigFactory
diff --git a/core/src/main/scala/spark/util/BoundedPriorityQueue.scala b/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
index 0575497f5d..0b51c23f7b 100644
--- a/core/src/main/scala/spark/util/BoundedPriorityQueue.scala
+++ b/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import java.io.Serializable
import java.util.{PriorityQueue => JPriorityQueue}
diff --git a/core/src/main/scala/spark/util/ByteBufferInputStream.scala b/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala
index 47a28e2f76..e214d2a519 100644
--- a/core/src/main/scala/spark/util/ByteBufferInputStream.scala
+++ b/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import java.io.InputStream
import java.nio.ByteBuffer
-import spark.storage.BlockManager
+import org.apache.spark.storage.BlockManager
/**
* Reads data from a ByteBuffer, and optionally cleans it up using BlockManager.dispose()
diff --git a/core/src/main/scala/spark/util/Clock.scala b/core/src/main/scala/org/apache/spark/util/Clock.scala
index aa71a5b442..97c2b45aab 100644
--- a/core/src/main/scala/spark/util/Clock.scala
+++ b/core/src/main/scala/org/apache/spark/util/Clock.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
/**
* An interface to represent clocks, so that they can be mocked out in unit tests.
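A hypothetical consumer sketch of why this indirection exists: production code takes a Clock, and tests inject a controllable fake (see the FakeClock diff further below). This assumes the trait's single getTime(): Long member, which this hunk does not show.

    class Throttler(clock: Clock, intervalMs: Long) {
      private var last = clock.getTime()
      // Returns true at most once per intervalMs, as measured by the clock.
      def ready(): Boolean = {
        val now = clock.getTime()
        if (now - last >= intervalMs) { last = now; true } else false
      }
    }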
diff --git a/core/src/main/scala/spark/util/CompletionIterator.scala b/core/src/main/scala/org/apache/spark/util/CompletionIterator.scala
index 210450892b..dc15a38b29 100644
--- a/core/src/main/scala/spark/util/CompletionIterator.scala
+++ b/core/src/main/scala/org/apache/spark/util/CompletionIterator.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
/**
* Wrapper around an iterator which calls a completion method after it successfully iterates through all the elements
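A minimal from-scratch sketch of the wrapper idiom described above; the names and structure here are illustrative, not the class's real members.

    class CompletionSketch[A](sub: Iterator[A], onComplete: () => Unit)
        extends Iterator[A] {
      private var completed = false
      def hasNext: Boolean = {
        val more = sub.hasNext
        // Fire the completion callback exactly once, on exhaustion.
        if (!more && !completed) { completed = true; onComplete() }
        more
      }
      def next(): A = sub.next()
    }

This shape lets callers release resources (for example, in the shuffle fetch path) as soon as a consumer drains an iterator.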
diff --git a/core/src/main/scala/spark/util/Distribution.scala b/core/src/main/scala/org/apache/spark/util/Distribution.scala
index 5d4d7a6c50..33bf3562fe 100644
--- a/core/src/main/scala/spark/util/Distribution.scala
+++ b/core/src/main/scala/org/apache/spark/util/Distribution.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import java.io.PrintStream
diff --git a/core/src/main/scala/spark/util/IdGenerator.scala b/core/src/main/scala/org/apache/spark/util/IdGenerator.scala
index 3422280559..17e55f7996 100644
--- a/core/src/main/scala/spark/util/IdGenerator.scala
+++ b/core/src/main/scala/org/apache/spark/util/IdGenerator.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import java.util.concurrent.atomic.AtomicInteger
diff --git a/core/src/main/scala/spark/util/IntParam.scala b/core/src/main/scala/org/apache/spark/util/IntParam.scala
index daf0d58fa2..626bb49eea 100644
--- a/core/src/main/scala/spark/util/IntParam.scala
+++ b/core/src/main/scala/org/apache/spark/util/IntParam.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
/**
* An extractor object for parsing strings into integers.
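Extractor objects like this are consumed through pattern matching. A sketch of the idiom; the unapply body below is an assumption written to match the documented contract, not copied from the file.

    object IntParamSketch {
      object IntParam {
        def unapply(str: String): Option[Int] =
          try Some(str.toInt) catch { case _: NumberFormatException => None }
      }
      def main(args: Array[String]): Unit = {
        "8080" match {
          case IntParam(port) => println(s"port = $port")
          case other          => println(s"not an int: $other")
        }
      }
    }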
diff --git a/core/src/main/scala/spark/util/MemoryParam.scala b/core/src/main/scala/org/apache/spark/util/MemoryParam.scala
index 298562323a..0ee6707826 100644
--- a/core/src/main/scala/spark/util/MemoryParam.scala
+++ b/core/src/main/scala/org/apache/spark/util/MemoryParam.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
-import spark.Utils
+import org.apache.spark.Utils
/**
* An extractor object for parsing JVM memory strings, such as "10g", into an Int representing
diff --git a/core/src/main/scala/spark/util/MetadataCleaner.scala b/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
index 92909e0959..a430a75451 100644
--- a/core/src/main/scala/spark/util/MetadataCleaner.scala
+++ b/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import java.util.concurrent.{TimeUnit, ScheduledFuture, Executors}
import java.util.{TimerTask, Timer}
-import spark.Logging
+import org.apache.spark.Logging
/**
diff --git a/core/src/main/scala/spark/util/MutablePair.scala b/core/src/main/scala/org/apache/spark/util/MutablePair.scala
index 78d404e66b..34f1f6606f 100644
--- a/core/src/main/scala/spark/util/MutablePair.scala
+++ b/core/src/main/scala/org/apache/spark/util/MutablePair.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
/**
diff --git a/core/src/main/scala/spark/util/NextIterator.scala b/core/src/main/scala/org/apache/spark/util/NextIterator.scala
index 22163ece8d..8266e5e495 100644
--- a/core/src/main/scala/spark/util/NextIterator.scala
+++ b/core/src/main/scala/org/apache/spark/util/NextIterator.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
/** Provides a basic/boilerplate Iterator implementation. */
private[spark] abstract class NextIterator[U] extends Iterator[U] {
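A sketch of a concrete subclass, assuming the contract this class has in the codebase at this point: implement getNext(), set the protected finished flag once the source is drained, and override close() for cleanup.

    import java.io.BufferedReader

    class LineIterator(reader: BufferedReader) extends NextIterator[String] {
      override protected def getNext(): String = {
        val line = reader.readLine()
        if (line == null) finished = true  // tells NextIterator we are done
        line
      }
      override protected def close(): Unit = reader.close()
    }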
diff --git a/core/src/main/scala/spark/util/RateLimitedOutputStream.scala b/core/src/main/scala/org/apache/spark/util/RateLimitedOutputStream.scala
index 00f782bbe7..47e1b45004 100644
--- a/core/src/main/scala/spark/util/RateLimitedOutputStream.scala
+++ b/core/src/main/scala/org/apache/spark/util/RateLimitedOutputStream.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import scala.annotation.tailrec
diff --git a/core/src/main/scala/spark/util/SerializableBuffer.scala b/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala
index 7e6842628a..f2b1ad7d0e 100644
--- a/core/src/main/scala/spark/util/SerializableBuffer.scala
+++ b/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import java.nio.ByteBuffer
import java.io.{IOException, ObjectOutputStream, EOFException, ObjectInputStream}
diff --git a/core/src/main/scala/spark/util/StatCounter.scala b/core/src/main/scala/org/apache/spark/util/StatCounter.scala
index 76358d4151..020d5edba9 100644
--- a/core/src/main/scala/spark/util/StatCounter.scala
+++ b/core/src/main/scala/org/apache/spark/util/StatCounter.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
/**
* A class for tracking the statistics of a set of numbers (count, mean and variance) in a
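A usage sketch, assuming the class's public surface (a companion apply over a collection of Doubles, plus count/mean/variance and merge) is unchanged by the rename.

    import org.apache.spark.util.StatCounter

    object StatCounterExample {
      def main(args: Array[String]): Unit = {
        val stats = StatCounter(Seq(1.0, 2.0, 3.0, 4.0))
        println(s"count=${stats.count} mean=${stats.mean} variance=${stats.variance}")
        stats.merge(5.0)  // folds another value into the running aggregate
        println(s"after merge: mean=${stats.mean}")
      }
    }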
diff --git a/core/src/main/scala/spark/util/TimeStampedHashMap.scala b/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala
index 07772a0afb..277de2f8a6 100644
--- a/core/src/main/scala/spark/util/TimeStampedHashMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala
@@ -15,13 +15,14 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConversions
import scala.collection.mutable.Map
import scala.collection.immutable
-import spark.scheduler.MapStatus
+import org.apache.spark.scheduler.MapStatus
+import org.apache.spark.Logging
/**
* This is a custom implementation of scala.collection.mutable.Map which stores the insertion
@@ -29,7 +30,7 @@ import spark.scheduler.MapStatus
* threshold time can then be removed using the clearOldValues method. This is intended to be a drop-in
* replacement of scala.collection.mutable.HashMap.
*/
-class TimeStampedHashMap[A, B] extends Map[A, B]() with spark.Logging {
+class TimeStampedHashMap[A, B] extends Map[A, B]() with Logging {
val internalMap = new ConcurrentHashMap[A, (B, Long)]()
def get(key: A): Option[B] = {
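A usage sketch of the map described above, assuming the clearOldValues(threshTime) entry point that the comment names; it behaves as a scala.collection.mutable.Map whose stale entries can be swept by timestamp.

    import org.apache.spark.util.TimeStampedHashMap

    object MetadataSweepSketch {
      def main(args: Array[String]): Unit = {
        val map = new TimeStampedHashMap[String, Int]()
        map("stage-1") = 42  // insertion is timestamped internally
        val cutoff = System.currentTimeMillis() - 60 * 60 * 1000
        map.clearOldValues(cutoff)  // drops entries older than one hour
        println(map.get("stage-1"))
      }
    }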
diff --git a/core/src/main/scala/spark/util/TimeStampedHashSet.scala b/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala
index 41e3fd8cba..26983138ff 100644
--- a/core/src/main/scala/spark/util/TimeStampedHashSet.scala
+++ b/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import scala.collection.mutable.Set
import scala.collection.JavaConversions
diff --git a/core/src/main/scala/spark/util/Vector.scala b/core/src/main/scala/org/apache/spark/util/Vector.scala
index a47cac3b96..fe710c58ac 100644
--- a/core/src/main/scala/spark/util/Vector.scala
+++ b/core/src/main/scala/org/apache/spark/util/Vector.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
class Vector(val elements: Array[Double]) extends Serializable {
def length = elements.length
@@ -130,7 +130,7 @@ object Vector {
implicit def doubleToMultiplier(num: Double) = new Multiplier(num)
- implicit object VectorAccumParam extends spark.AccumulatorParam[Vector] {
+ implicit object VectorAccumParam extends org.apache.spark.AccumulatorParam[Vector] {
def addInPlace(t1: Vector, t2: Vector) = t1 + t2
def zero(initialValue: Vector) = Vector.zeros(initialValue.length)
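The hunk above shows the AccumulatorParam pattern: an implicit object supplying addInPlace and zero so a type can back an accumulator. A driver-side sketch, assuming SparkContext.accumulator's implicit-parameter signature; the implicit VectorAccumParam in Vector's companion is picked up automatically.

    import org.apache.spark.SparkContext
    import org.apache.spark.util.Vector

    object VectorAccumulatorSketch {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext("local", "VectorAccumulatorSketch")
        val acc = sc.accumulator(Vector.zeros(2))
        sc.parallelize(1 to 10).foreach { i =>
          acc += new Vector(Array(i.toDouble, 1.0))
        }
        println(acc.value)  // component sums: (55.0, 10.0)
        sc.stop()
      }
    }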
diff --git a/core/src/test/resources/test_metrics_config.properties b/core/src/test/resources/test_metrics_config.properties
index 2b31ddf2eb..056a158456 100644
--- a/core/src/test/resources/test_metrics_config.properties
+++ b/core/src/test/resources/test_metrics_config.properties
@@ -1,6 +1,6 @@
*.sink.console.period = 10
*.sink.console.unit = seconds
-*.source.jvm.class = spark.metrics.source.JvmSource
+*.source.jvm.class = org.apache.spark.metrics.source.JvmSource
master.sink.console.period = 20
master.sink.console.unit = minutes
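These properties matter to the rename because the metrics system resolves sources and sinks from the fully qualified class names in the file; a stale name fails at load time. A minimal sketch of that failure mode, assuming a Class.forName-style reflective lookup (which the config format implies; the loader itself is not shown here).

    object MetricsClassLoadSketch {
      def main(args: Array[String]): Unit = {
        // Succeeds after the rename:
        println(Class.forName("org.apache.spark.metrics.source.JvmSource").getName)
        // The old name would now throw ClassNotFoundException:
        // Class.forName("spark.metrics.source.JvmSource")
      }
    }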
diff --git a/core/src/test/resources/test_metrics_system.properties b/core/src/test/resources/test_metrics_system.properties
index d5479f0298..6f5ecea93a 100644
--- a/core/src/test/resources/test_metrics_system.properties
+++ b/core/src/test/resources/test_metrics_system.properties
@@ -1,7 +1,7 @@
*.sink.console.period = 10
*.sink.console.unit = seconds
-test.sink.console.class = spark.metrics.sink.ConsoleSink
-test.sink.dummy.class = spark.metrics.sink.DummySink
-test.source.dummy.class = spark.metrics.source.DummySource
+test.sink.console.class = org.apache.spark.metrics.sink.ConsoleSink
+test.sink.dummy.class = org.apache.spark.metrics.sink.DummySink
+test.source.dummy.class = org.apache.spark.metrics.source.DummySource
test.sink.console.period = 20
test.sink.console.unit = minutes
diff --git a/core/src/test/scala/spark/AccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
index 0af175f316..4434f3b87c 100644
--- a/core/src/test/scala/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
@@ -23,7 +23,7 @@ import collection.mutable
import java.util.Random
import scala.math.exp
import scala.math.signum
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
class AccumulatorSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
diff --git a/core/src/test/scala/spark/BroadcastSuite.scala b/core/src/test/scala/org/apache/spark/BroadcastSuite.scala
index 785721ece8..b3a53d928b 100644
--- a/core/src/test/scala/spark/BroadcastSuite.scala
+++ b/core/src/test/scala/org/apache/spark/BroadcastSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
diff --git a/core/src/test/scala/spark/CheckpointSuite.scala b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
index 966dede2be..23b14f4245 100644
--- a/core/src/test/scala/spark/CheckpointSuite.scala
+++ b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import java.io.File
-import spark.rdd._
-import spark.SparkContext._
+import org.apache.spark.rdd._
+import org.apache.spark.SparkContext._
import storage.StorageLevel
class CheckpointSuite extends FunSuite with LocalSparkContext with Logging {
diff --git a/core/src/test/scala/spark/ClosureCleanerSuite.scala b/core/src/test/scala/org/apache/spark/ClosureCleanerSuite.scala
index 7d2831e19c..8494899b98 100644
--- a/core/src/test/scala/spark/ClosureCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ClosureCleanerSuite.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io.NotSerializableException
import org.scalatest.FunSuite
-import spark.LocalSparkContext._
+import org.apache.spark.LocalSparkContext._
import SparkContext._
class ClosureCleanerSuite extends FunSuite {
diff --git a/core/src/test/scala/spark/DistributedSuite.scala b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
index e11efe459c..7a856d4081 100644
--- a/core/src/test/scala/spark/DistributedSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import network.ConnectionManagerId
import org.scalatest.FunSuite
diff --git a/core/src/test/scala/spark/DriverSuite.scala b/core/src/test/scala/org/apache/spark/DriverSuite.scala
index 553c0309f6..b08aad1a6f 100644
--- a/core/src/test/scala/spark/DriverSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DriverSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io.File
@@ -34,7 +34,7 @@ class DriverSuite extends FunSuite with Timeouts {
val masters = Table(("master"), ("local"), ("local-cluster[2,1,512]"))
forAll(masters) { (master: String) =>
failAfter(30 seconds) {
- Utils.execute(Seq("./spark-class", "spark.DriverWithoutCleanup", master),
+ Utils.execute(Seq("./spark-class", "org.apache.spark.DriverWithoutCleanup", master),
new File(System.getenv("SPARK_HOME")))
}
}
diff --git a/core/src/test/scala/spark/FailureSuite.scala b/core/src/test/scala/org/apache/spark/FailureSuite.scala
index 5b133cdd6e..ee89a7a387 100644
--- a/core/src/test/scala/spark/FailureSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FailureSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
diff --git a/core/src/test/scala/spark/FileServerSuite.scala b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
index 242ae971f8..35d1d41af1 100644
--- a/core/src/test/scala/spark/FileServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import com.google.common.io.Files
import org.scalatest.FunSuite
diff --git a/core/src/test/scala/spark/FileSuite.scala b/core/src/test/scala/org/apache/spark/FileSuite.scala
index 1e2c257c4b..7b82a4cdd9 100644
--- a/core/src/test/scala/spark/FileSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.io.{FileWriter, PrintWriter, File}
diff --git a/core/src/test/scala/spark/JavaAPISuite.java b/core/src/test/scala/org/apache/spark/JavaAPISuite.java
index c337c49268..8a869c9005 100644
--- a/core/src/test/scala/spark/JavaAPISuite.java
+++ b/core/src/test/scala/org/apache/spark/JavaAPISuite.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark;
+package org.apache.spark;
import java.io.File;
import java.io.IOException;
@@ -38,15 +38,15 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
-import spark.api.java.JavaDoubleRDD;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.*;
-import spark.partial.BoundedDouble;
-import spark.partial.PartialResult;
-import spark.storage.StorageLevel;
-import spark.util.StatCounter;
+import org.apache.spark.api.java.JavaDoubleRDD;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.*;
+import org.apache.spark.partial.BoundedDouble;
+import org.apache.spark.partial.PartialResult;
+import org.apache.spark.storage.StorageLevel;
+import org.apache.spark.util.StatCounter;
// The test suite itself is Serializable so that anonymous Function implementations can be
diff --git a/core/src/test/scala/spark/KryoSerializerSuite.scala b/core/src/test/scala/org/apache/spark/KryoSerializerSuite.scala
index 7568a0bf65..d7b23c93fe 100644
--- a/core/src/test/scala/spark/KryoSerializerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/KryoSerializerSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import scala.collection.mutable
@@ -167,7 +167,7 @@ class KryoSerializerSuite extends FunSuite with SharedSparkContext {
}
override def beforeAll() {
- System.setProperty("spark.serializer", "spark.KryoSerializer")
+ System.setProperty("spark.serializer", "org.apache.spark.KryoSerializer")
System.setProperty("spark.kryo.registrator", classOf[MyRegistrator].getName)
super.beforeAll()
}
diff --git a/core/src/test/scala/spark/LocalSparkContext.scala b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
index ddc212d290..6ec124da9c 100644
--- a/core/src/test/scala/spark/LocalSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.Suite
import org.scalatest.BeforeAndAfterEach
diff --git a/core/src/test/scala/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
index c21f3331d0..6013320eaa 100644
--- a/core/src/test/scala/spark/MapOutputTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import akka.actor._
-import spark.scheduler.MapStatus
-import spark.storage.BlockManagerId
-import spark.util.AkkaUtils
+import org.apache.spark.scheduler.MapStatus
+import org.apache.spark.storage.BlockManagerId
+import org.apache.spark.util.AkkaUtils
class MapOutputTrackerSuite extends FunSuite with LocalSparkContext {
diff --git a/core/src/test/scala/spark/PairRDDFunctionsSuite.scala b/core/src/test/scala/org/apache/spark/PairRDDFunctionsSuite.scala
index 328b3b5497..f79752b34e 100644
--- a/core/src/test/scala/spark/PairRDDFunctionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PairRDDFunctionsSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashSet
@@ -23,7 +23,7 @@ import scala.collection.mutable.HashSet
import org.scalatest.FunSuite
import com.google.common.io.Files
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
class PairRDDFunctionsSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/spark/PartitionPruningRDDSuite.scala b/core/src/test/scala/org/apache/spark/PartitionPruningRDDSuite.scala
index 88352b639f..adbe805916 100644
--- a/core/src/test/scala/spark/PartitionPruningRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PartitionPruningRDDSuite.scala
@@ -1,8 +1,8 @@
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
-import spark.SparkContext._
-import spark.rdd.PartitionPruningRDD
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.PartitionPruningRDD
class PartitionPruningRDDSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/spark/PartitioningSuite.scala b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
index b1e0b2b4d0..7669cf6fb1 100644
--- a/core/src/test/scala/spark/PartitioningSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import scala.collection.mutable.ArrayBuffer
import SparkContext._
-import spark.util.StatCounter
+import org.apache.spark.util.StatCounter
import scala.math.abs
class PartitioningSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/spark/PipedRDDSuite.scala b/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala
index 35c04710a3..2e851d892d 100644
--- a/core/src/test/scala/spark/PipedRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import SparkContext._
diff --git a/core/src/test/scala/spark/RDDSuite.scala b/core/src/test/scala/org/apache/spark/RDDSuite.scala
index e306952bbd..342ba8adb2 100644
--- a/core/src/test/scala/spark/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/RDDSuite.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import scala.collection.mutable.HashMap
import org.scalatest.FunSuite
import org.scalatest.concurrent.Timeouts._
import org.scalatest.time.{Span, Millis}
-import spark.SparkContext._
-import spark.rdd._
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd._
import scala.collection.parallel.mutable
class RDDSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/spark/SharedSparkContext.scala b/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
index 70c24515be..97cbca09bf 100644
--- a/core/src/test/scala/spark/SharedSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/SharedSparkContext.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.Suite
import org.scalatest.BeforeAndAfterAll
diff --git a/core/src/test/scala/spark/ShuffleNettySuite.scala b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
index 6bad6c1d13..e121b162ad 100644
--- a/core/src/test/scala/spark/ShuffleNettySuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.BeforeAndAfterAll
diff --git a/core/src/test/scala/spark/ShuffleSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
index 8745689c70..357175e89e 100644
--- a/core/src/test/scala/spark/ShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
-import spark.SparkContext._
-import spark.ShuffleSuite.NonJavaSerializableClass
-import spark.rdd.{SubtractedRDD, CoGroupedRDD, OrderedRDDFunctions, ShuffledRDD}
-import spark.util.MutablePair
+import org.apache.spark.SparkContext._
+import org.apache.spark.ShuffleSuite.NonJavaSerializableClass
+import org.apache.spark.rdd.{SubtractedRDD, CoGroupedRDD, OrderedRDDFunctions, ShuffledRDD}
+import org.apache.spark.util.MutablePair
class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
@@ -54,7 +54,7 @@ class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
// If the Kryo serializer is not used correctly, the shuffle would fail because the
// default Java serializer cannot handle the non serializable class.
val c = new ShuffledRDD[Int, NonJavaSerializableClass, (Int, NonJavaSerializableClass)](
- b, new HashPartitioner(NUM_BLOCKS)).setSerializer(classOf[spark.KryoSerializer].getName)
+ b, new HashPartitioner(NUM_BLOCKS)).setSerializer(classOf[KryoSerializer].getName)
val shuffleId = c.dependencies.head.asInstanceOf[ShuffleDependency[Int, Int]].shuffleId
assert(c.count === 10)
@@ -76,7 +76,7 @@ class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
// If the Kryo serializer is not used correctly, the shuffle would fail because the
// default Java serializer cannot handle the non serializable class.
val c = new ShuffledRDD[Int, NonJavaSerializableClass, (Int, NonJavaSerializableClass)](
- b, new HashPartitioner(3)).setSerializer(classOf[spark.KryoSerializer].getName)
+ b, new HashPartitioner(3)).setSerializer(classOf[KryoSerializer].getName)
assert(c.count === 10)
}
@@ -92,7 +92,7 @@ class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
// NOTE: The default Java serializer doesn't create zero-sized blocks.
// So, use Kryo
val c = new ShuffledRDD[Int, Int, (Int, Int)](b, new HashPartitioner(10))
- .setSerializer(classOf[spark.KryoSerializer].getName)
+ .setSerializer(classOf[KryoSerializer].getName)
val shuffleId = c.dependencies.head.asInstanceOf[ShuffleDependency[Int, Int]].shuffleId
assert(c.count === 4)
diff --git a/core/src/test/scala/spark/SizeEstimatorSuite.scala b/core/src/test/scala/org/apache/spark/SizeEstimatorSuite.scala
index 1ef812dfbd..214ac74898 100644
--- a/core/src/test/scala/spark/SizeEstimatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SizeEstimatorSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfterAll
diff --git a/core/src/test/scala/spark/SortingSuite.scala b/core/src/test/scala/org/apache/spark/SortingSuite.scala
index b933c4aab8..f4fa9511dd 100644
--- a/core/src/test/scala/spark/SortingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SortingSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
diff --git a/core/src/test/scala/spark/SparkContextInfoSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
index 6d50bf5e1b..939fe51801 100644
--- a/core/src/test/scala/spark/SparkContextInfoSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
class SparkContextInfoSuite extends FunSuite with LocalSparkContext {
test("getPersistentRDDs only returns RDDs that are marked as cached") {
@@ -57,4 +57,4 @@ class SparkContextInfoSuite extends FunSuite with LocalSparkContext {
rdd.collect()
assert(sc.getRDDStorageInfo.size === 1)
}
-}
\ No newline at end of file
+}
diff --git a/core/src/test/scala/spark/ThreadingSuite.scala b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
index f2acd0bd3c..69383ddfb8 100644
--- a/core/src/test/scala/spark/ThreadingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import java.util.concurrent.Semaphore
import java.util.concurrent.atomic.AtomicBoolean
diff --git a/core/src/test/scala/spark/UnpersistSuite.scala b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
index 93977d16f4..46a2da1724 100644
--- a/core/src/test/scala/spark/UnpersistSuite.scala
+++ b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.FunSuite
import org.scalatest.concurrent.Timeouts._
import org.scalatest.time.{Span, Millis}
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
class UnpersistSuite extends FunSuite with LocalSparkContext {
test("unpersist RDD") {
diff --git a/core/src/test/scala/spark/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/UtilsSuite.scala
index 98a6c1a1c9..3a908720a8 100644
--- a/core/src/test/scala/spark/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/UtilsSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import com.google.common.base.Charsets
import com.google.common.io.Files
diff --git a/core/src/test/scala/spark/ZippedPartitionsSuite.scala b/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala
index bb5d379273..618b9c113b 100644
--- a/core/src/test/scala/spark/ZippedPartitionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import scala.collection.immutable.NumericRange
diff --git a/core/src/test/scala/spark/io/CompressionCodecSuite.scala b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
index 1ba82fe2b9..fd6f69041a 100644
--- a/core/src/test/scala/spark/io/CompressionCodecSuite.scala
+++ b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.io
+package org.apache.spark.io
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
diff --git a/core/src/test/scala/spark/metrics/MetricsConfigSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
index b0213b62d9..58c94a162d 100644
--- a/core/src/test/scala/spark/metrics/MetricsConfigSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsConfigSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.metrics
+package org.apache.spark.metrics
import org.scalatest.{BeforeAndAfter, FunSuite}
@@ -35,7 +35,7 @@ class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
val property = conf.getInstance("random")
assert(property.size() === 3)
- assert(property.getProperty("sink.servlet.class") === "spark.metrics.sink.MetricsServlet")
+ assert(property.getProperty("sink.servlet.class") === "org.apache.spark.metrics.sink.MetricsServlet")
assert(property.getProperty("sink.servlet.uri") === "/metrics/json")
assert(property.getProperty("sink.servlet.sample") === "false")
}
@@ -48,8 +48,8 @@ class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
assert(masterProp.size() === 6)
assert(masterProp.getProperty("sink.console.period") === "20")
assert(masterProp.getProperty("sink.console.unit") === "minutes")
- assert(masterProp.getProperty("source.jvm.class") === "spark.metrics.source.JvmSource")
- assert(masterProp.getProperty("sink.servlet.class") === "spark.metrics.sink.MetricsServlet")
+ assert(masterProp.getProperty("source.jvm.class") === "org.apache.spark.metrics.source.JvmSource")
+ assert(masterProp.getProperty("sink.servlet.class") === "org.apache.spark.metrics.sink.MetricsServlet")
assert(masterProp.getProperty("sink.servlet.uri") === "/metrics/master/json")
assert(masterProp.getProperty("sink.servlet.sample") === "false")
@@ -57,8 +57,8 @@ class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
assert(workerProp.size() === 6)
assert(workerProp.getProperty("sink.console.period") === "10")
assert(workerProp.getProperty("sink.console.unit") === "seconds")
- assert(workerProp.getProperty("source.jvm.class") === "spark.metrics.source.JvmSource")
- assert(workerProp.getProperty("sink.servlet.class") === "spark.metrics.sink.MetricsServlet")
+ assert(workerProp.getProperty("source.jvm.class") === "org.apache.spark.metrics.source.JvmSource")
+ assert(workerProp.getProperty("sink.servlet.class") === "org.apache.spark.metrics.sink.MetricsServlet")
assert(workerProp.getProperty("sink.servlet.uri") === "/metrics/json")
assert(workerProp.getProperty("sink.servlet.sample") === "false")
}
@@ -73,7 +73,7 @@ class MetricsConfigSuite extends FunSuite with BeforeAndAfter {
val masterProp = conf.getInstance("master")
val sourceProps = conf.subProperties(masterProp, MetricsSystem.SOURCE_REGEX)
assert(sourceProps.size === 1)
- assert(sourceProps("jvm").getProperty("class") === "spark.metrics.source.JvmSource")
+ assert(sourceProps("jvm").getProperty("class") === "org.apache.spark.metrics.source.JvmSource")
val sinkProps = conf.subProperties(masterProp, MetricsSystem.SINK_REGEX)
assert(sinkProps.size === 2)
diff --git a/core/src/test/scala/spark/metrics/MetricsSystemSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
index dc65ac6994..7181333adf 100644
--- a/core/src/test/scala/spark/metrics/MetricsSystemSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala
@@ -15,9 +15,10 @@
* limitations under the License.
*/
-package spark.metrics
+package org.apache.spark.metrics
import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.apache.spark.deploy.master.MasterSource
class MetricsSystemSuite extends FunSuite with BeforeAndAfter {
var filePath: String = _
@@ -46,7 +47,7 @@ class MetricsSystemSuite extends FunSuite with BeforeAndAfter {
assert(sinks.length === 1)
assert(!metricsSystem.getServletHandlers.isEmpty)
- val source = new spark.deploy.master.MasterSource(null)
+ val source = new MasterSource(null)
metricsSystem.registerSource(source)
assert(sources.length === 1)
}
diff --git a/core/src/test/scala/spark/rdd/JdbcRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala
index dc8ca941c1..3d39a31252 100644
--- a/core/src/test/scala/spark/rdd/JdbcRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark
+package org.apache.spark
import org.scalatest.{ BeforeAndAfter, FunSuite }
-import spark.SparkContext._
-import spark.rdd.JdbcRDD
+import org.apache.spark.SparkContext._
+import org.apache.spark.rdd.JdbcRDD
import java.sql._
class JdbcRDDSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
diff --git a/core/src/test/scala/spark/rdd/ParallelCollectionSplitSuite.scala b/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
index d1276d541f..a80afdee7e 100644
--- a/core/src/test/scala/spark/rdd/ParallelCollectionSplitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.rdd
+package org.apache.spark.rdd
import scala.collection.immutable.NumericRange
diff --git a/core/src/test/scala/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index 3b4a0d52fc..94df282b28 100644
--- a/core/src/test/scala/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -15,26 +15,26 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import scala.collection.mutable.{Map, HashMap}
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
-import spark.LocalSparkContext
-import spark.MapOutputTracker
-import spark.RDD
-import spark.SparkContext
-import spark.Partition
-import spark.TaskContext
-import spark.{Dependency, ShuffleDependency, OneToOneDependency}
-import spark.{FetchFailed, Success, TaskEndReason}
-import spark.storage.{BlockManagerId, BlockManagerMaster}
-
-import spark.scheduler.cluster.Pool
-import spark.scheduler.cluster.SchedulingMode
-import spark.scheduler.cluster.SchedulingMode.SchedulingMode
+import org.apache.spark.LocalSparkContext
+import org.apache.spark.MapOutputTracker
+import org.apache.spark.RDD
+import org.apache.spark.SparkContext
+import org.apache.spark.Partition
+import org.apache.spark.TaskContext
+import org.apache.spark.{Dependency, ShuffleDependency, OneToOneDependency}
+import org.apache.spark.{FetchFailed, Success, TaskEndReason}
+import org.apache.spark.storage.{BlockManagerId, BlockManagerMaster}
+
+import org.apache.spark.scheduler.cluster.Pool
+import org.apache.spark.scheduler.cluster.SchedulingMode
+import org.apache.spark.scheduler.cluster.SchedulingMode.SchedulingMode
/**
* Tests for DAGScheduler. These tests directly call the event processing functions in DAGScheduler
diff --git a/core/src/test/scala/spark/scheduler/JobLoggerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
index bb9e715f95..f5b3e97222 100644
--- a/core/src/test/scala/spark/scheduler/JobLoggerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import java.util.Properties
import java.util.concurrent.LinkedBlockingQueue
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
import scala.collection.mutable
-import spark._
-import spark.SparkContext._
+import org.apache.spark._
+import org.apache.spark.SparkContext._
class JobLoggerSuite extends FunSuite with LocalSparkContext with ShouldMatchers {
diff --git a/core/src/test/scala/spark/scheduler/SparkListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
index 392d67d67b..aac7c207cb 100644
--- a/core/src/test/scala/spark/scheduler/SparkListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import org.scalatest.FunSuite
-import spark.{SparkContext, LocalSparkContext}
+import org.apache.spark.{SparkContext, LocalSparkContext}
import scala.collection.mutable
import org.scalatest.matchers.ShouldMatchers
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
/**
*
diff --git a/core/src/test/scala/spark/scheduler/TaskContextSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
index 95a6eee2fc..0347cc02d7 100644
--- a/core/src/test/scala/spark/scheduler/TaskContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.scheduler
+package org.apache.spark.scheduler
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
-import spark.TaskContext
-import spark.RDD
-import spark.SparkContext
-import spark.Partition
-import spark.LocalSparkContext
+import org.apache.spark.TaskContext
+import org.apache.spark.RDD
+import org.apache.spark.SparkContext
+import org.apache.spark.Partition
+import org.apache.spark.LocalSparkContext
class TaskContextSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
diff --git a/core/src/test/scala/spark/scheduler/cluster/ClusterSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterSchedulerSuite.scala
index abfdabf5fe..92ad9f09b2 100644
--- a/core/src/test/scala/spark/scheduler/cluster/ClusterSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterSchedulerSuite.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
-import spark._
-import spark.scheduler._
-import spark.scheduler.cluster._
+import org.apache.spark._
+import org.apache.spark.scheduler._
+import org.apache.spark.scheduler.cluster._
import scala.collection.mutable.ArrayBuffer
import java.util.Properties
diff --git a/core/src/test/scala/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala
index 5a0b949ef5..a4f63baf3d 100644
--- a/core/src/test/scala/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/ClusterTaskSetManagerSuite.scala
@@ -15,18 +15,18 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable
import org.scalatest.FunSuite
-import spark._
-import spark.scheduler._
-import spark.executor.TaskMetrics
+import org.apache.spark._
+import org.apache.spark.scheduler._
+import org.apache.spark.executor.TaskMetrics
import java.nio.ByteBuffer
-import spark.util.FakeClock
+import org.apache.spark.util.FakeClock
/**
* A mock ClusterScheduler implementation that just remembers information about tasks started and
diff --git a/core/src/test/scala/spark/scheduler/cluster/FakeTask.scala b/core/src/test/scala/org/apache/spark/scheduler/cluster/FakeTask.scala
index de9e66be20..2f12aaed18 100644
--- a/core/src/test/scala/spark/scheduler/cluster/FakeTask.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/cluster/FakeTask.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
-import spark.scheduler.{TaskLocation, Task}
+import org.apache.spark.scheduler.{TaskLocation, Task}
class FakeTask(stageId: Int, prefLocs: Seq[TaskLocation] = Nil) extends Task[Int](stageId) {
override def run(attemptId: Long): Int = 0
diff --git a/core/src/test/scala/spark/scheduler/local/LocalSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/local/LocalSchedulerSuite.scala
index d28ee47fa3..111340a65c 100644
--- a/core/src/test/scala/spark/scheduler/local/LocalSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/local/LocalSchedulerSuite.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.scheduler.local
+package org.apache.spark.scheduler.local
import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter
-import spark._
-import spark.scheduler._
-import spark.scheduler.cluster._
+import org.apache.spark._
+import org.apache.spark.scheduler._
+import org.apache.spark.scheduler.cluster._
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.{ConcurrentMap, HashMap}
import java.util.concurrent.Semaphore
diff --git a/core/src/test/scala/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
index b719d65342..88ba10f2f2 100644
--- a/core/src/test/scala/spark/storage/BlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.storage
+package org.apache.spark.storage
import java.nio.ByteBuffer
@@ -29,11 +29,12 @@ import org.scalatest.concurrent.Timeouts._
import org.scalatest.matchers.ShouldMatchers._
import org.scalatest.time.SpanSugar._
-import spark.JavaSerializer
-import spark.KryoSerializer
-import spark.SizeEstimator
-import spark.util.AkkaUtils
-import spark.util.ByteBufferInputStream
+import org.apache.spark.JavaSerializer
+import org.apache.spark.KryoSerializer
+import org.apache.spark.SizeEstimator
+import org.apache.spark.Utils
+import org.apache.spark.util.AkkaUtils
+import org.apache.spark.util.ByteBufferInputStream
class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodTester {
@@ -56,7 +57,7 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodT
System.setProperty("spark.hostPort", "localhost:" + boundPort)
master = new BlockManagerMaster(
- actorSystem.actorOf(Props(new spark.storage.BlockManagerMasterActor(true))))
+ actorSystem.actorOf(Props(new BlockManagerMasterActor(true))))
// Set the arch to 64-bit and compressedOops to true to get a deterministic test-case
oldArch = System.setProperty("os.arch", "amd64")
@@ -65,7 +66,7 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodT
val initialize = PrivateMethod[Unit]('initialize)
SizeEstimator invokePrivate initialize()
// Set some value ...
- System.setProperty("spark.hostPort", spark.Utils.localHostName() + ":" + 1111)
+ System.setProperty("spark.hostPort", Utils.localHostName() + ":" + 1111)
}
after {
@@ -105,10 +106,10 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodT
assert(level2 === level1, "level2 is not same as level1")
assert(level2.eq(level1), "level2 is not the same object as level1")
assert(level3 != level1, "level3 is same as level1")
- val bytes1 = spark.Utils.serialize(level1)
- val level1_ = spark.Utils.deserialize[StorageLevel](bytes1)
- val bytes2 = spark.Utils.serialize(level2)
- val level2_ = spark.Utils.deserialize[StorageLevel](bytes2)
+ val bytes1 = Utils.serialize(level1)
+ val level1_ = Utils.deserialize[StorageLevel](bytes1)
+ val bytes2 = Utils.serialize(level2)
+ val level2_ = Utils.deserialize[StorageLevel](bytes2)
assert(level1_ === level1, "Deserialized level1 not same as original level1")
assert(level1_.eq(level1), "Deserialized level1 not the same object as original level2")
assert(level2_ === level2, "Deserialized level2 not same as original level2")
@@ -122,10 +123,10 @@ class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodT
assert(id2 === id1, "id2 is not same as id1")
assert(id2.eq(id1), "id2 is not the same object as id1")
assert(id3 != id1, "id3 is same as id1")
- val bytes1 = spark.Utils.serialize(id1)
- val id1_ = spark.Utils.deserialize[BlockManagerId](bytes1)
- val bytes2 = spark.Utils.serialize(id2)
- val id2_ = spark.Utils.deserialize[BlockManagerId](bytes2)
+ val bytes1 = Utils.serialize(id1)
+ val id1_ = Utils.deserialize[BlockManagerId](bytes1)
+ val bytes2 = Utils.serialize(id2)
+ val id2_ = Utils.deserialize[BlockManagerId](bytes2)
assert(id1_ === id1, "Deserialized id1 is not same as original id1")
assert(id1_.eq(id1), "Deserialized id1 is not the same object as original id1")
assert(id2_ === id2, "Deserialized id2 is not same as original id2")
diff --git a/core/src/test/scala/spark/ui/UISuite.scala b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
index 735a794396..3321fb5eb7 100644
--- a/core/src/test/scala/spark/ui/UISuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.ui
+package org.apache.spark.ui
import scala.util.{Failure, Success, Try}
import java.net.ServerSocket
diff --git a/core/src/test/scala/spark/util/DistributionSuite.scala b/core/src/test/scala/org/apache/spark/util/DistributionSuite.scala
index 6578b55e82..63642461e4 100644
--- a/core/src/test/scala/spark/util/DistributionSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/DistributionSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
diff --git a/core/src/test/scala/spark/util/FakeClock.scala b/core/src/test/scala/org/apache/spark/util/FakeClock.scala
index 236706317e..0a45917b08 100644
--- a/core/src/test/scala/spark/util/FakeClock.scala
+++ b/core/src/test/scala/org/apache/spark/util/FakeClock.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
class FakeClock extends Clock {
private var time = 0L
diff --git a/core/src/test/scala/spark/util/NextIteratorSuite.scala b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
index fdbd43d941..45867463a5 100644
--- a/core/src/test/scala/spark/util/NextIteratorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
diff --git a/core/src/test/scala/spark/util/RateLimitedOutputStreamSuite.scala b/core/src/test/scala/org/apache/spark/util/RateLimitedOutputStreamSuite.scala
index 4c0044202f..a9dd0b1a5b 100644
--- a/core/src/test/scala/spark/util/RateLimitedOutputStreamSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/RateLimitedOutputStreamSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.util
+package org.apache.spark.util
import org.scalatest.FunSuite
import java.io.ByteArrayOutputStream
diff --git a/docs/_layouts/global.html b/docs/_layouts/global.html
index 84749fda4e..349eb92a47 100755
--- a/docs/_layouts/global.html
+++ b/docs/_layouts/global.html
@@ -100,7 +100,7 @@
<li><a href="tuning.html">Tuning Guide</a></li>
<li><a href="hardware-provisioning.html">Hardware Provisioning</a></li>
<li><a href="building-with-maven.html">Building Spark with Maven</a></li>
<li><a href="contributing-to-spark.html">Contributing to Spark</a></li>
</ul>
</li>
</ul>
diff --git a/examples/pom.xml b/examples/pom.xml
index 687fbcca8f..13b5531511 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -19,13 +19,13 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-examples</artifactId>
<packaging>jar</packaging>
<name>Spark Project Examples</name>
@@ -33,25 +33,25 @@
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-streaming</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-mllib</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-bagel</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
@@ -132,7 +132,7 @@
<id>hadoop2-yarn</id>
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-yarn</artifactId>
<version>${project.version}</version>
<scope>provided</scope>
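The groupId change in this pom also affects downstream builds that resolve Spark from a repository. A sketch of the corresponding sbt coordinates, assuming locally published 0.8.0-SNAPSHOT artifacts:

    // build.sbt: coordinates move from org.spark-project to org.apache.spark.
    libraryDependencies += "org.apache.spark" % "spark-core" % "0.8.0-SNAPSHOT"
    libraryDependencies += "org.apache.spark" % "spark-streaming" % "0.8.0-SNAPSHOT"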
diff --git a/examples/src/main/java/spark/examples/JavaHdfsLR.java b/examples/src/main/java/org/apache/spark/examples/JavaHdfsLR.java
index 9485e0cfa9..be0d38589c 100644
--- a/examples/src/main/java/spark/examples/JavaHdfsLR.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaHdfsLR.java
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.examples;
+package org.apache.spark.examples;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.Function;
-import spark.api.java.function.Function2;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.api.java.function.Function2;
import java.io.Serializable;
import java.util.Arrays;
diff --git a/examples/src/main/java/spark/examples/JavaKMeans.java b/examples/src/main/java/org/apache/spark/examples/JavaKMeans.java
index 2d34776177..5a6afe7eae 100644
--- a/examples/src/main/java/spark/examples/JavaKMeans.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaKMeans.java
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.examples;
+package org.apache.spark.examples;
import scala.Tuple2;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.Function;
-import spark.api.java.function.PairFunction;
-import spark.util.Vector;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.api.java.function.PairFunction;
+import org.apache.spark.util.Vector;
import java.util.List;
import java.util.Map;
diff --git a/examples/src/main/java/spark/examples/JavaLogQuery.java b/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
index d22684d980..152f029213 100644
--- a/examples/src/main/java/spark/examples/JavaLogQuery.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
@@ -15,16 +15,16 @@
* limitations under the License.
*/
-package spark.examples;
+package org.apache.spark.examples;
import com.google.common.collect.Lists;
import scala.Tuple2;
import scala.Tuple3;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.Function2;
-import spark.api.java.function.PairFunction;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function2;
+import org.apache.spark.api.java.function.PairFunction;
import java.io.Serializable;
import java.util.Collections;
diff --git a/examples/src/main/java/spark/examples/JavaPageRank.java b/examples/src/main/java/org/apache/spark/examples/JavaPageRank.java
index 75df1af2e3..c5603a639b 100644
--- a/examples/src/main/java/spark/examples/JavaPageRank.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaPageRank.java
@@ -15,17 +15,17 @@
* limitations under the License.
*/
-package spark.examples;
+package org.apache.spark.examples;
import scala.Tuple2;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.FlatMapFunction;
-import spark.api.java.function.Function;
-import spark.api.java.function.Function2;
-import spark.api.java.function.PairFlatMapFunction;
-import spark.api.java.function.PairFunction;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.api.java.function.Function2;
+import org.apache.spark.api.java.function.PairFlatMapFunction;
+import org.apache.spark.api.java.function.PairFunction;
import java.util.List;
import java.util.ArrayList;
diff --git a/examples/src/main/java/spark/examples/JavaSparkPi.java b/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java
index d5f42fbb38..4a2380caf5 100644
--- a/examples/src/main/java/spark/examples/JavaSparkPi.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaSparkPi.java
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.examples;
+package org.apache.spark.examples;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.Function;
-import spark.api.java.function.Function2;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.api.java.function.Function2;
import java.util.ArrayList;
import java.util.List;
diff --git a/examples/src/main/java/spark/examples/JavaTC.java b/examples/src/main/java/org/apache/spark/examples/JavaTC.java
index 559d7f9e53..17f21f6b77 100644
--- a/examples/src/main/java/spark/examples/JavaTC.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaTC.java
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.examples;
+package org.apache.spark.examples;
import scala.Tuple2;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.PairFunction;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.PairFunction;
import java.util.ArrayList;
import java.util.HashSet;
diff --git a/examples/src/main/java/spark/examples/JavaWordCount.java b/examples/src/main/java/org/apache/spark/examples/JavaWordCount.java
index 1af370c1c3..07d32ad659 100644
--- a/examples/src/main/java/spark/examples/JavaWordCount.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaWordCount.java
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.examples;
+package org.apache.spark.examples;
import scala.Tuple2;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.FlatMapFunction;
-import spark.api.java.function.Function2;
-import spark.api.java.function.PairFunction;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.Function2;
+import org.apache.spark.api.java.function.PairFunction;
import java.util.Arrays;
import java.util.List;
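The import moves above are essentially the whole migration for example programs. For orientation, a minimal word count against the renamed core API (in Scala for brevity; the input path is a placeholder):

    import org.apache.spark.SparkContext
    import org.apache.spark.SparkContext._

    // Classic word count compiled against the new package names.
    val sc = new SparkContext("local", "WordCount")
    val counts = sc.textFile("README.md")
      .flatMap(_.split(" "))
      .map(word => (word, 1))
      .reduceByKey(_ + _)
    counts.take(10).foreach(println)
    sc.stop()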
diff --git a/examples/src/main/java/spark/mllib/examples/JavaALS.java b/examples/src/main/java/org/apache/spark/mllib/examples/JavaALS.java
index b48f459cb7..628cb892b6 100644
--- a/examples/src/main/java/spark/mllib/examples/JavaALS.java
+++ b/examples/src/main/java/org/apache/spark/mllib/examples/JavaALS.java
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.mllib.examples;
+package org.apache.spark.mllib.examples;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.Function;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
-import spark.mllib.recommendation.ALS;
-import spark.mllib.recommendation.MatrixFactorizationModel;
-import spark.mllib.recommendation.Rating;
+import org.apache.spark.mllib.recommendation.ALS;
+import org.apache.spark.mllib.recommendation.MatrixFactorizationModel;
+import org.apache.spark.mllib.recommendation.Rating;
import java.io.Serializable;
import java.util.Arrays;
diff --git a/examples/src/main/java/spark/mllib/examples/JavaKMeans.java b/examples/src/main/java/org/apache/spark/mllib/examples/JavaKMeans.java
index 02f40438b8..cd59a139b9 100644
--- a/examples/src/main/java/spark/mllib/examples/JavaKMeans.java
+++ b/examples/src/main/java/org/apache/spark/mllib/examples/JavaKMeans.java
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.mllib.examples;
+package org.apache.spark.mllib.examples;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.Function;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
-import spark.mllib.clustering.KMeans;
-import spark.mllib.clustering.KMeansModel;
+import org.apache.spark.mllib.clustering.KMeans;
+import org.apache.spark.mllib.clustering.KMeansModel;
import java.util.Arrays;
import java.util.StringTokenizer;
diff --git a/examples/src/main/java/spark/mllib/examples/JavaLR.java b/examples/src/main/java/org/apache/spark/mllib/examples/JavaLR.java
index bf4aeaf40f..258061c8e6 100644
--- a/examples/src/main/java/spark/mllib/examples/JavaLR.java
+++ b/examples/src/main/java/org/apache/spark/mllib/examples/JavaLR.java
@@ -15,16 +15,16 @@
* limitations under the License.
*/
-package spark.mllib.examples;
+package org.apache.spark.mllib.examples;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.Function;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.Function;
-import spark.mllib.classification.LogisticRegressionWithSGD;
-import spark.mllib.classification.LogisticRegressionModel;
-import spark.mllib.regression.LabeledPoint;
+import org.apache.spark.mllib.classification.LogisticRegressionWithSGD;
+import org.apache.spark.mllib.classification.LogisticRegressionModel;
+import org.apache.spark.mllib.regression.LabeledPoint;
import java.util.Arrays;
import java.util.StringTokenizer;
diff --git a/examples/src/main/java/spark/streaming/examples/JavaFlumeEventCount.java b/examples/src/main/java/org/apache/spark/streaming/examples/JavaFlumeEventCount.java
index 096a9ae219..261813bf2f 100644
--- a/examples/src/main/java/spark/streaming/examples/JavaFlumeEventCount.java
+++ b/examples/src/main/java/org/apache/spark/streaming/examples/JavaFlumeEventCount.java
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.streaming.examples;
+package org.apache.spark.streaming.examples;
-import spark.api.java.function.Function;
-import spark.streaming.*;
-import spark.streaming.api.java.*;
-import spark.streaming.dstream.SparkFlumeEvent;
+import org.apache.spark.api.java.function.Function;
+import org.apache.spark.streaming.*;
+import org.apache.spark.streaming.api.java.*;
+import org.apache.spark.streaming.dstream.SparkFlumeEvent;
/**
* Produces a count of events received from Flume.
diff --git a/examples/src/main/java/spark/streaming/examples/JavaNetworkWordCount.java b/examples/src/main/java/org/apache/spark/streaming/examples/JavaNetworkWordCount.java
index c54d3f3d59..def87c199b 100644
--- a/examples/src/main/java/spark/streaming/examples/JavaNetworkWordCount.java
+++ b/examples/src/main/java/org/apache/spark/streaming/examples/JavaNetworkWordCount.java
@@ -15,17 +15,17 @@
* limitations under the License.
*/
-package spark.streaming.examples;
+package org.apache.spark.streaming.examples;
import com.google.common.collect.Lists;
import scala.Tuple2;
-import spark.api.java.function.FlatMapFunction;
-import spark.api.java.function.Function2;
-import spark.api.java.function.PairFunction;
-import spark.streaming.Duration;
-import spark.streaming.api.java.JavaDStream;
-import spark.streaming.api.java.JavaPairDStream;
-import spark.streaming.api.java.JavaStreamingContext;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.Function2;
+import org.apache.spark.api.java.function.PairFunction;
+import org.apache.spark.streaming.Duration;
+import org.apache.spark.streaming.api.java.JavaDStream;
+import org.apache.spark.streaming.api.java.JavaPairDStream;
+import org.apache.spark.streaming.api.java.JavaStreamingContext;
/**
* Counts words in UTF8 encoded, '\n' delimited text received from the network every second.
diff --git a/examples/src/main/java/spark/streaming/examples/JavaQueueStream.java b/examples/src/main/java/org/apache/spark/streaming/examples/JavaQueueStream.java
index 1f4a991542..c8c7389dd1 100644
--- a/examples/src/main/java/spark/streaming/examples/JavaQueueStream.java
+++ b/examples/src/main/java/org/apache/spark/streaming/examples/JavaQueueStream.java
@@ -15,17 +15,17 @@
* limitations under the License.
*/
-package spark.streaming.examples;
+package org.apache.spark.streaming.examples;
import com.google.common.collect.Lists;
import scala.Tuple2;
-import spark.api.java.JavaRDD;
-import spark.api.java.function.Function2;
-import spark.api.java.function.PairFunction;
-import spark.streaming.Duration;
-import spark.streaming.api.java.JavaDStream;
-import spark.streaming.api.java.JavaPairDStream;
-import spark.streaming.api.java.JavaStreamingContext;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.function.Function2;
+import org.apache.spark.api.java.function.PairFunction;
+import org.apache.spark.streaming.Duration;
+import org.apache.spark.streaming.api.java.JavaDStream;
+import org.apache.spark.streaming.api.java.JavaPairDStream;
+import org.apache.spark.streaming.api.java.JavaStreamingContext;
import java.util.LinkedList;
import java.util.List;
diff --git a/examples/src/main/scala/spark/examples/BroadcastTest.scala b/examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
index 911490cb6c..868ff81f67 100644
--- a/examples/src/main/scala/spark/examples/BroadcastTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/BroadcastTest.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
-import spark.SparkContext
+import org.apache.spark.SparkContext
object BroadcastTest {
def main(args: Array[String]) {
diff --git a/examples/src/main/scala/spark/examples/CassandraTest.scala b/examples/src/main/scala/org/apache/spark/examples/CassandraTest.scala
index 104bfd5204..33bf7151a7 100644
--- a/examples/src/main/scala/spark/examples/CassandraTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/CassandraTest.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
import org.apache.hadoop.mapreduce.Job
import org.apache.cassandra.hadoop.ColumnFamilyOutputFormat
import org.apache.cassandra.hadoop.ConfigHelper
import org.apache.cassandra.hadoop.ColumnFamilyInputFormat
import org.apache.cassandra.thrift._
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
import java.nio.ByteBuffer
import java.util.SortedMap
import org.apache.cassandra.db.IColumn
diff --git a/examples/src/main/scala/spark/examples/ExceptionHandlingTest.scala b/examples/src/main/scala/org/apache/spark/examples/ExceptionHandlingTest.scala
index 67ddaec8d2..92eb96bd8e 100644
--- a/examples/src/main/scala/spark/examples/ExceptionHandlingTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/ExceptionHandlingTest.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
-import spark.SparkContext
+import org.apache.spark.SparkContext
object ExceptionHandlingTest {
def main(args: Array[String]) {
diff --git a/examples/src/main/scala/spark/examples/GroupByTest.scala b/examples/src/main/scala/org/apache/spark/examples/GroupByTest.scala
index 5cee413615..42c2e0e8e1 100644
--- a/examples/src/main/scala/spark/examples/GroupByTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/GroupByTest.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
import java.util.Random
object GroupByTest {
diff --git a/examples/src/main/scala/spark/examples/HBaseTest.scala b/examples/src/main/scala/org/apache/spark/examples/HBaseTest.scala
index 4dd6c243ac..efe2e93b0d 100644
--- a/examples/src/main/scala/spark/examples/HBaseTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/HBaseTest.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
-import spark._
-import spark.rdd.NewHadoopRDD
+import org.apache.spark._
+import org.apache.spark.rdd.NewHadoopRDD
import org.apache.hadoop.hbase.{HBaseConfiguration, HTableDescriptor}
import org.apache.hadoop.hbase.client.HBaseAdmin
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
diff --git a/examples/src/main/scala/spark/examples/HdfsTest.scala b/examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala
index 23258336e2..d6a88d3032 100644
--- a/examples/src/main/scala/spark/examples/HdfsTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/HdfsTest.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
-import spark._
+import org.apache.spark._
object HdfsTest {
def main(args: Array[String]) {
diff --git a/examples/src/main/scala/spark/examples/LocalALS.scala b/examples/src/main/scala/org/apache/spark/examples/LocalALS.scala
index 7a449a9d72..4af45b2b4a 100644
--- a/examples/src/main/scala/spark/examples/LocalALS.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalALS.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
import scala.math.sqrt
import cern.jet.math._
diff --git a/examples/src/main/scala/spark/examples/LocalFileLR.scala b/examples/src/main/scala/org/apache/spark/examples/LocalFileLR.scala
index c1f8d32aa8..fb130ea198 100644
--- a/examples/src/main/scala/spark/examples/LocalFileLR.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalFileLR.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
import java.util.Random
-import spark.util.Vector
+import org.apache.spark.util.Vector
object LocalFileLR {
val D = 10 // Number of dimensions
diff --git a/examples/src/main/scala/spark/examples/LocalKMeans.scala b/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
index 0a0bc6f476..f90ea35cd4 100644
--- a/examples/src/main/scala/spark/examples/LocalKMeans.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalKMeans.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
import java.util.Random
-import spark.util.Vector
-import spark.SparkContext._
+import org.apache.spark.util.Vector
+import org.apache.spark.SparkContext._
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
diff --git a/examples/src/main/scala/spark/examples/LocalLR.scala b/examples/src/main/scala/org/apache/spark/examples/LocalLR.scala
index ab99bf1fbe..cd4e9f1af0 100644
--- a/examples/src/main/scala/spark/examples/LocalLR.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalLR.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
import java.util.Random
-import spark.util.Vector
+import org.apache.spark.util.Vector
/**
* Logistic regression based classification.
diff --git a/examples/src/main/scala/spark/examples/LocalPi.scala b/examples/src/main/scala/org/apache/spark/examples/LocalPi.scala
index ccd69695df..bb7f22ec8d 100644
--- a/examples/src/main/scala/spark/examples/LocalPi.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LocalPi.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
import scala.math.random
-import spark._
+import org.apache.spark._
import SparkContext._
object LocalPi {
diff --git a/examples/src/main/scala/spark/examples/LogQuery.scala b/examples/src/main/scala/org/apache/spark/examples/LogQuery.scala
index e815ececf7..17ff3ce764 100644
--- a/examples/src/main/scala/spark/examples/LogQuery.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/LogQuery.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
/**
* Executes a roll-up-style query against Apache logs.
*/
diff --git a/examples/src/main/scala/spark/examples/MultiBroadcastTest.scala b/examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala
index d0b1cf06e5..f79f0142b8 100644
--- a/examples/src/main/scala/spark/examples/MultiBroadcastTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/MultiBroadcastTest.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
-import spark.SparkContext
+import org.apache.spark.SparkContext
object MultiBroadcastTest {
def main(args: Array[String]) {
diff --git a/examples/src/main/scala/spark/examples/SimpleSkewedGroupByTest.scala b/examples/src/main/scala/org/apache/spark/examples/SimpleSkewedGroupByTest.scala
index d197bbaf7c..37ddfb5db7 100644
--- a/examples/src/main/scala/spark/examples/SimpleSkewedGroupByTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SimpleSkewedGroupByTest.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
import java.util.Random
object SimpleSkewedGroupByTest {
diff --git a/examples/src/main/scala/spark/examples/SkewedGroupByTest.scala b/examples/src/main/scala/org/apache/spark/examples/SkewedGroupByTest.scala
index 4641b82444..9c954b2b5b 100644
--- a/examples/src/main/scala/spark/examples/SkewedGroupByTest.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SkewedGroupByTest.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
import java.util.Random
object SkewedGroupByTest {
diff --git a/examples/src/main/scala/spark/examples/SparkALS.scala b/examples/src/main/scala/org/apache/spark/examples/SparkALS.scala
index ba0dfd8f9b..814944ba1c 100644
--- a/examples/src/main/scala/spark/examples/SparkALS.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkALS.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
import scala.math.sqrt
import cern.jet.math._
import cern.colt.matrix._
import cern.colt.matrix.linalg._
-import spark._
+import org.apache.spark._
/**
* Alternating least squares matrix factorization.
diff --git a/examples/src/main/scala/spark/examples/SparkHdfsLR.scala b/examples/src/main/scala/org/apache/spark/examples/SparkHdfsLR.scala
index 43c9115664..646682878f 100644
--- a/examples/src/main/scala/spark/examples/SparkHdfsLR.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkHdfsLR.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
import java.util.Random
import scala.math.exp
-import spark.util.Vector
-import spark._
-import spark.scheduler.InputFormatInfo
+import org.apache.spark.util.Vector
+import org.apache.spark._
+import org.apache.spark.scheduler.InputFormatInfo
/**
* Logistic regression based classification.
diff --git a/examples/src/main/scala/spark/examples/SparkKMeans.scala b/examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala
index 38ed3b149a..f7bf75b4e5 100644
--- a/examples/src/main/scala/spark/examples/SparkKMeans.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
import java.util.Random
-import spark.SparkContext
-import spark.util.Vector
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.util.Vector
+import org.apache.spark.SparkContext._
import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
diff --git a/examples/src/main/scala/spark/examples/SparkLR.scala b/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
index 52a0d69744..9ed9fe4d76 100644
--- a/examples/src/main/scala/spark/examples/SparkLR.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkLR.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
import java.util.Random
import scala.math.exp
-import spark.util.Vector
-import spark._
+import org.apache.spark.util.Vector
+import org.apache.spark._
/**
* Logistic regression based classification.
diff --git a/examples/src/main/scala/spark/examples/SparkPageRank.scala b/examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala
index dedbbd01a3..2721caf08b 100644
--- a/examples/src/main/scala/spark/examples/SparkPageRank.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkPageRank.scala
@@ -1,7 +1,7 @@
-package spark.examples
+package org.apache.spark.examples
-import spark.SparkContext._
-import spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.SparkContext
/**
diff --git a/examples/src/main/scala/spark/examples/SparkPi.scala b/examples/src/main/scala/org/apache/spark/examples/SparkPi.scala
index 00560ac9d1..5a2bc9b0d0 100644
--- a/examples/src/main/scala/spark/examples/SparkPi.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkPi.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
import scala.math.random
-import spark._
+import org.apache.spark._
import SparkContext._
/** Computes an approximation to pi */
diff --git a/examples/src/main/scala/spark/examples/SparkTC.scala b/examples/src/main/scala/org/apache/spark/examples/SparkTC.scala
index bf988a953b..5a7a9d1bd8 100644
--- a/examples/src/main/scala/spark/examples/SparkTC.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/SparkTC.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.examples
+package org.apache.spark.examples
-import spark._
+import org.apache.spark._
import SparkContext._
import scala.util.Random
import scala.collection.mutable
diff --git a/examples/src/main/scala/spark/examples/bagel/PageRankUtils.scala b/examples/src/main/scala/org/apache/spark/examples/bagel/PageRankUtils.scala
index c23ee9895f..b190e83c4d 100644
--- a/examples/src/main/scala/spark/examples/bagel/PageRankUtils.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/bagel/PageRankUtils.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.examples.bagel
+package org.apache.spark.examples.bagel
-import spark._
-import spark.SparkContext._
+import org.apache.spark._
+import org.apache.spark.SparkContext._
-import spark.bagel._
-import spark.bagel.Bagel._
+import org.apache.spark.bagel._
+import org.apache.spark.bagel.Bagel._
import scala.collection.mutable.ArrayBuffer
diff --git a/examples/src/main/scala/spark/examples/bagel/WikipediaPageRank.scala b/examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRank.scala
index 00635a7ffa..b1f606e48e 100644
--- a/examples/src/main/scala/spark/examples/bagel/WikipediaPageRank.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRank.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.examples.bagel
+package org.apache.spark.examples.bagel
-import spark._
-import spark.SparkContext._
+import org.apache.spark._
+import org.apache.spark.SparkContext._
-import spark.bagel._
-import spark.bagel.Bagel._
+import org.apache.spark.bagel._
+import org.apache.spark.bagel.Bagel._
import scala.xml.{XML,NodeSeq}
@@ -37,7 +37,7 @@ object WikipediaPageRank {
System.exit(-1)
}
- System.setProperty("spark.serializer", "spark.KryoSerializer")
+ System.setProperty("spark.serializer", "org.apache.spark.KryoSerializer")
System.setProperty("spark.kryo.registrator", classOf[PRKryoRegistrator].getName)
val inputFile = args(0)
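Any job that enables Kryo must make the same change, since the serializer is looked up by fully qualified class name and the old value no longer resolves. A minimal sketch; com.example.MyRegistrator is a placeholder for your own KryoRegistrator:

    // Point spark.serializer at the relocated class before creating the context.
    System.setProperty("spark.serializer", "org.apache.spark.KryoSerializer")
    System.setProperty("spark.kryo.registrator", "com.example.MyRegistrator")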
diff --git a/examples/src/main/scala/spark/examples/bagel/WikipediaPageRankStandalone.scala b/examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRankStandalone.scala
index c416ddbc58..3bfa48eaf3 100644
--- a/examples/src/main/scala/spark/examples/bagel/WikipediaPageRankStandalone.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/bagel/WikipediaPageRankStandalone.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.examples.bagel
+package org.apache.spark.examples.bagel
-import spark._
+import org.apache.spark._
import serializer.{DeserializationStream, SerializationStream, SerializerInstance}
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
-import spark.bagel._
-import spark.bagel.Bagel._
+import org.apache.spark.bagel._
+import org.apache.spark.bagel.Bagel._
import scala.xml.{XML,NodeSeq}
@@ -131,7 +131,7 @@ object WikipediaPageRankStandalone {
}
}
-class WPRSerializer extends spark.serializer.Serializer {
+class WPRSerializer extends org.apache.spark.serializer.Serializer {
def newInstance(): SerializerInstance = new WPRSerializerInstance()
}
diff --git a/examples/src/main/scala/spark/streaming/examples/ActorWordCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/ActorWordCount.scala
index 05d3176478..cd3423a07b 100644
--- a/examples/src/main/scala/spark/streaming/examples/ActorWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/ActorWordCount.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
import scala.collection.mutable.LinkedList
import scala.util.Random
@@ -25,11 +25,11 @@ import akka.actor.ActorRef
import akka.actor.Props
import akka.actor.actorRef2Scala
-import spark.streaming.Seconds
-import spark.streaming.StreamingContext
-import spark.streaming.StreamingContext.toPairDStreamFunctions
-import spark.streaming.receivers.Receiver
-import spark.util.AkkaUtils
+import org.apache.spark.streaming.Seconds
+import org.apache.spark.streaming.StreamingContext
+import org.apache.spark.streaming.StreamingContext.toPairDStreamFunctions
+import org.apache.spark.streaming.receivers.Receiver
+import org.apache.spark.util.AkkaUtils
case class SubscribeReceiver(receiverActor: ActorRef)
case class UnsubscribeReceiver(receiverActor: ActorRef)
@@ -80,7 +80,7 @@ class FeederActor extends Actor {
* goes and subscribes to a typical publisher/feeder actor and receives
* data.
*
- * @see [[spark.streaming.examples.FeederActor]]
+ * @see [[org.apache.spark.streaming.examples.FeederActor]]
*/
class SampleActorReceiver[T: ClassManifest](urlOfPublisher: String)
extends Actor with Receiver {
diff --git a/examples/src/main/scala/spark/streaming/examples/FlumeEventCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/FlumeEventCount.scala
index 3ab4fc2c37..9f6e163454 100644
--- a/examples/src/main/scala/spark/streaming/examples/FlumeEventCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/FlumeEventCount.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
-import spark.util.IntParam
-import spark.storage.StorageLevel
-import spark.streaming._
+import org.apache.spark.util.IntParam
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming._
/**
* Produces a count of events received from Flume.
diff --git a/examples/src/main/scala/spark/streaming/examples/HdfsWordCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/HdfsWordCount.scala
index 30af01a26f..bc8564b3ba 100644
--- a/examples/src/main/scala/spark/streaming/examples/HdfsWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/HdfsWordCount.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
-import spark.streaming.{Seconds, StreamingContext}
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.streaming.StreamingContext._
/**
diff --git a/examples/src/main/scala/spark/streaming/examples/KafkaWordCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/KafkaWordCount.scala
index d9c76d1a33..12f939d5a7 100644
--- a/examples/src/main/scala/spark/streaming/examples/KafkaWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/KafkaWordCount.scala
@@ -15,17 +15,17 @@
* limitations under the License.
*/
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
import java.util.Properties
import kafka.message.Message
import kafka.producer.SyncProducerConfig
import kafka.producer._
-import spark.SparkContext
-import spark.streaming._
-import spark.streaming.StreamingContext._
-import spark.storage.StorageLevel
-import spark.streaming.util.RawTextHelper._
+import org.apache.spark.SparkContext
+import org.apache.spark.streaming._
+import org.apache.spark.streaming.StreamingContext._
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.util.RawTextHelper._
/**
* Consumes messages from one or more topics in Kafka and does wordcount.
diff --git a/examples/src/main/scala/spark/streaming/examples/NetworkWordCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/NetworkWordCount.scala
index b29d79aac5..e2487dca5f 100644
--- a/examples/src/main/scala/spark/streaming/examples/NetworkWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/NetworkWordCount.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
-import spark.streaming.{Seconds, StreamingContext}
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.streaming.StreamingContext._
/**
* Counts words in UTF8 encoded, '\n' delimited text received from the network every second.
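For orientation, a minimal streaming word count against the renamed packages might look like the sketch below; the host, port, and master string are placeholders, and the 0.8-era StreamingContext constructor and socketTextStream signatures are assumed:

    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.StreamingContext._

    // One-second batches over a text socket; counts words per batch.
    val ssc = new StreamingContext("local[2]", "NetworkWordCount", Seconds(1))
    val lines = ssc.socketTextStream("localhost", 9999)
    val counts = lines.flatMap(_.split(" ")).map(word => (word, 1)).reduceByKey(_ + _)
    counts.print()
    ssc.start()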
diff --git a/examples/src/main/scala/spark/streaming/examples/QueueStream.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/QueueStream.scala
index da36c8c23c..822da8c9b5 100644
--- a/examples/src/main/scala/spark/streaming/examples/QueueStream.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/QueueStream.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
-import spark.RDD
-import spark.streaming.{Seconds, StreamingContext}
-import spark.streaming.StreamingContext._
+import org.apache.spark.RDD
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.streaming.StreamingContext._
import scala.collection.mutable.SynchronizedQueue
diff --git a/examples/src/main/scala/spark/streaming/examples/RawNetworkGrep.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/RawNetworkGrep.scala
index 7fb680bcc3..2e3d9ccf00 100644
--- a/examples/src/main/scala/spark/streaming/examples/RawNetworkGrep.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/RawNetworkGrep.scala
@@ -15,20 +15,20 @@
* limitations under the License.
*/
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
-import spark.util.IntParam
-import spark.storage.StorageLevel
+import org.apache.spark.util.IntParam
+import org.apache.spark.storage.StorageLevel
-import spark.streaming._
-import spark.streaming.util.RawTextHelper
+import org.apache.spark.streaming._
+import org.apache.spark.streaming.util.RawTextHelper
/**
* Receives text from multiple rawNetworkStreams and counts how many '\n' delimited
* lines have the word 'the' in them. This is useful for benchmarking purposes. This
* will only work with org.apache.spark.streaming.util.RawTextSender running on all worker nodes
* and with Spark using Kryo serialization (set Java property "spark.serializer" to
- * "spark.KryoSerializer").
+ * "org.apache.spark.KryoSerializer").
* Usage: RawNetworkGrep <master> <numStreams> <host> <port> <batchMillis>
* <master> is the Spark master URL
* <numStreams> is the number of rawNetworkStreams, which should be the same as the number
diff --git a/examples/src/main/scala/spark/streaming/examples/StatefulNetworkWordCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/StatefulNetworkWordCount.scala
index b709fc3c87..cb30c4edb3 100644
--- a/examples/src/main/scala/spark/streaming/examples/StatefulNetworkWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/StatefulNetworkWordCount.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
-import spark.streaming._
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming._
+import org.apache.spark.streaming.StreamingContext._
/**
* Counts words cumulatively in UTF8 encoded, '\n' delimited text received from the network every second.
diff --git a/examples/src/main/scala/spark/streaming/examples/TwitterAlgebirdCMS.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/TwitterAlgebirdCMS.scala
index 8770abd57e..35b6329ab3 100644
--- a/examples/src/main/scala/spark/streaming/examples/TwitterAlgebirdCMS.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/TwitterAlgebirdCMS.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
-import spark.streaming.{Seconds, StreamingContext}
-import spark.storage.StorageLevel
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.storage.StorageLevel
import com.twitter.algebird._
-import spark.streaming.StreamingContext._
-import spark.SparkContext._
+import org.apache.spark.streaming.StreamingContext._
+import org.apache.spark.SparkContext._
/**
* Illustrates the use of the Count-Min Sketch, from Twitter's Algebird library, to compute
diff --git a/examples/src/main/scala/spark/streaming/examples/TwitterAlgebirdHLL.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/TwitterAlgebirdHLL.scala
index cba5c986be..8bfde2a829 100644
--- a/examples/src/main/scala/spark/streaming/examples/TwitterAlgebirdHLL.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/TwitterAlgebirdHLL.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
-import spark.streaming.{Seconds, StreamingContext}
-import spark.storage.StorageLevel
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.storage.StorageLevel
import com.twitter.algebird.HyperLogLog._
import com.twitter.algebird.HyperLogLogMonoid
-import spark.streaming.dstream.TwitterInputDStream
+import org.apache.spark.streaming.dstream.TwitterInputDStream
/**
* Illustrates the use of the HyperLogLog algorithm, from Twitter's Algebird library, to compute
diff --git a/examples/src/main/scala/spark/streaming/examples/TwitterPopularTags.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/TwitterPopularTags.scala
index 682b99f75e..27aa6b14bf 100644
--- a/examples/src/main/scala/spark/streaming/examples/TwitterPopularTags.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/TwitterPopularTags.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
-import spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.streaming.{Seconds, StreamingContext}
import StreamingContext._
-import spark.SparkContext._
+import org.apache.spark.SparkContext._
/**
* Calculates popular hashtags (topics) over sliding 10 and 60 second windows from a Twitter
diff --git a/examples/src/main/scala/spark/streaming/examples/ZeroMQWordCount.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/ZeroMQWordCount.scala
index a0cae06c30..c8743b9e25 100644
--- a/examples/src/main/scala/spark/streaming/examples/ZeroMQWordCount.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/ZeroMQWordCount.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.streaming.examples
+package org.apache.spark.streaming.examples
import akka.actor.ActorSystem
import akka.actor.actorRef2Scala
import akka.zeromq._
-import spark.streaming.{ Seconds, StreamingContext }
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.{ Seconds, StreamingContext }
+import org.apache.spark.streaming.StreamingContext._
import akka.zeromq.Subscribe
/**
diff --git a/examples/src/main/scala/spark/streaming/examples/clickstream/PageViewGenerator.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala
index dd36bbbf32..884d6d6f34 100644
--- a/examples/src/main/scala/spark/streaming/examples/clickstream/PageViewGenerator.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewGenerator.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.streaming.examples.clickstream
+package org.apache.spark.streaming.examples.clickstream
import java.net.{InetAddress,ServerSocket,Socket,SocketException}
import java.io.{InputStreamReader, BufferedReader, PrintWriter}
diff --git a/examples/src/main/scala/spark/streaming/examples/clickstream/PageViewStream.scala b/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewStream.scala
index 152da23489..8282cc9269 100644
--- a/examples/src/main/scala/spark/streaming/examples/clickstream/PageViewStream.scala
+++ b/examples/src/main/scala/org/apache/spark/streaming/examples/clickstream/PageViewStream.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.streaming.examples.clickstream
+package org.apache.spark.streaming.examples.clickstream
-import spark.streaming.{Seconds, StreamingContext}
-import spark.streaming.StreamingContext._
-import spark.SparkContext._
+import org.apache.spark.streaming.{Seconds, StreamingContext}
+import org.apache.spark.streaming.StreamingContext._
+import org.apache.spark.SparkContext._
/** Analyses a streaming dataset of web page views. This class demonstrates several types of
* operators available in Spark streaming.
diff --git a/mllib/pom.xml b/mllib/pom.xml
index ab31d5734e..2d5d3c00d1 100644
--- a/mllib/pom.xml
+++ b/mllib/pom.xml
@@ -19,13 +19,13 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-mllib</artifactId>
<packaging>jar</packaging>
<name>Spark Project ML Library</name>
@@ -33,7 +33,7 @@
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
</dependency>
diff --git a/mllib/src/main/scala/spark/mllib/classification/ClassificationModel.scala b/mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala
index 70fae8c15a..4f4a7f5296 100644
--- a/mllib/src/main/scala/spark/mllib/classification/ClassificationModel.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/classification/ClassificationModel.scala
@@ -1,6 +1,6 @@
-package spark.mllib.classification
+package org.apache.spark.mllib.classification
-import spark.RDD
+import org.apache.spark.RDD
trait ClassificationModel extends Serializable {
/**
diff --git a/mllib/src/main/scala/spark/mllib/classification/LogisticRegression.scala b/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
index 482e4a6745..91bb50c829 100644
--- a/mllib/src/main/scala/spark/mllib/classification/LogisticRegression.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/classification/LogisticRegression.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.mllib.classification
+package org.apache.spark.mllib.classification
import scala.math.round
-import spark.{Logging, RDD, SparkContext}
-import spark.mllib.optimization._
-import spark.mllib.regression._
-import spark.mllib.util.MLUtils
-import spark.mllib.util.DataValidators
+import org.apache.spark.{Logging, RDD, SparkContext}
+import org.apache.spark.mllib.optimization._
+import org.apache.spark.mllib.regression._
+import org.apache.spark.mllib.util.MLUtils
+import org.apache.spark.mllib.util.DataValidators
import org.jblas.DoubleMatrix
diff --git a/mllib/src/main/scala/spark/mllib/classification/SVM.scala b/mllib/src/main/scala/org/apache/spark/mllib/classification/SVM.scala
index 69393cd7b0..c92c7cc3f3 100644
--- a/mllib/src/main/scala/spark/mllib/classification/SVM.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/classification/SVM.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.mllib.classification
+package org.apache.spark.mllib.classification
import scala.math.signum
-import spark.{Logging, RDD, SparkContext}
-import spark.mllib.optimization._
-import spark.mllib.regression._
-import spark.mllib.util.MLUtils
-import spark.mllib.util.DataValidators
+import org.apache.spark.{Logging, RDD, SparkContext}
+import org.apache.spark.mllib.optimization._
+import org.apache.spark.mllib.regression._
+import org.apache.spark.mllib.util.MLUtils
+import org.apache.spark.mllib.util.DataValidators
import org.jblas.DoubleMatrix
diff --git a/mllib/src/main/scala/spark/mllib/clustering/KMeans.scala b/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeans.scala
index 97e3d110ae..2c3db099fa 100644
--- a/mllib/src/main/scala/spark/mllib/clustering/KMeans.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeans.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.mllib.clustering
+package org.apache.spark.mllib.clustering
import scala.collection.mutable.ArrayBuffer
import scala.util.Random
-import spark.{SparkContext, RDD}
-import spark.SparkContext._
-import spark.Logging
-import spark.mllib.util.MLUtils
+import org.apache.spark.{SparkContext, RDD}
+import org.apache.spark.SparkContext._
+import org.apache.spark.Logging
+import org.apache.spark.mllib.util.MLUtils
import org.jblas.DoubleMatrix
diff --git a/mllib/src/main/scala/spark/mllib/clustering/KMeansModel.scala b/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala
index b8f80e80cd..d1fe5d138d 100644
--- a/mllib/src/main/scala/spark/mllib/clustering/KMeansModel.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/clustering/KMeansModel.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.mllib.clustering
+package org.apache.spark.mllib.clustering
-import spark.RDD
-import spark.SparkContext._
-import spark.mllib.util.MLUtils
+import org.apache.spark.RDD
+import org.apache.spark.SparkContext._
+import org.apache.spark.mllib.util.MLUtils
/**
diff --git a/mllib/src/main/scala/spark/mllib/clustering/LocalKMeans.scala b/mllib/src/main/scala/org/apache/spark/mllib/clustering/LocalKMeans.scala
index 89fe7d7e85..baf8251d8f 100644
--- a/mllib/src/main/scala/spark/mllib/clustering/LocalKMeans.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/clustering/LocalKMeans.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.clustering
+package org.apache.spark.mllib.clustering
import scala.util.Random
diff --git a/mllib/src/main/scala/spark/mllib/optimization/Gradient.scala b/mllib/src/main/scala/org/apache/spark/mllib/optimization/Gradient.scala
index 05568f55af..749e7364f4 100644
--- a/mllib/src/main/scala/spark/mllib/optimization/Gradient.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/optimization/Gradient.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.optimization
+package org.apache.spark.mllib.optimization
import org.jblas.DoubleMatrix
diff --git a/mllib/src/main/scala/spark/mllib/optimization/GradientDescent.scala b/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala
index 31917df7e8..b62c9b3340 100644
--- a/mllib/src/main/scala/spark/mllib/optimization/GradientDescent.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/optimization/GradientDescent.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.mllib.optimization
+package org.apache.spark.mllib.optimization
-import spark.{Logging, RDD, SparkContext}
-import spark.SparkContext._
+import org.apache.spark.{Logging, RDD, SparkContext}
+import org.apache.spark.SparkContext._
import org.jblas.DoubleMatrix
diff --git a/mllib/src/main/scala/spark/mllib/optimization/Optimizer.scala b/mllib/src/main/scala/org/apache/spark/mllib/optimization/Optimizer.scala
index 76a519c338..50059d385d 100644
--- a/mllib/src/main/scala/spark/mllib/optimization/Optimizer.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/optimization/Optimizer.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.mllib.optimization
+package org.apache.spark.mllib.optimization
-import spark.RDD
+import org.apache.spark.RDD
trait Optimizer {
diff --git a/mllib/src/main/scala/spark/mllib/optimization/Updater.scala b/mllib/src/main/scala/org/apache/spark/mllib/optimization/Updater.scala
index db67d6b0bc..4c51f4f881 100644
--- a/mllib/src/main/scala/spark/mllib/optimization/Updater.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/optimization/Updater.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.optimization
+package org.apache.spark.mllib.optimization
import scala.math._
import org.jblas.DoubleMatrix
diff --git a/mllib/src/main/scala/spark/mllib/recommendation/ALS.scala b/mllib/src/main/scala/org/apache/spark/mllib/recommendation/ALS.scala
index dbfbf59975..218217acfe 100644
--- a/mllib/src/main/scala/spark/mllib/recommendation/ALS.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/recommendation/ALS.scala
@@ -15,16 +15,16 @@
* limitations under the License.
*/
-package spark.mllib.recommendation
+package org.apache.spark.mllib.recommendation
import scala.collection.mutable.{ArrayBuffer, BitSet}
import scala.util.Random
import scala.util.Sorting
-import spark.{HashPartitioner, Partitioner, SparkContext, RDD}
-import spark.storage.StorageLevel
-import spark.KryoRegistrator
-import spark.SparkContext._
+import org.apache.spark.{HashPartitioner, Partitioner, SparkContext, RDD}
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.KryoRegistrator
+import org.apache.spark.SparkContext._
import com.esotericsoftware.kryo.Kryo
import org.jblas.{DoubleMatrix, SimpleBlas, Solve}
@@ -432,7 +432,7 @@ object ALS {
val (master, ratingsFile, rank, iters, outputDir) =
(args(0), args(1), args(2).toInt, args(3).toInt, args(4))
val blocks = if (args.length == 6) args(5).toInt else -1
- System.setProperty("spark.serializer", "spark.KryoSerializer")
+ System.setProperty("spark.serializer", "org.apache.spark.KryoSerializer")
System.setProperty("spark.kryo.registrator", classOf[ALSRegistrator].getName)
System.setProperty("spark.kryo.referenceTracking", "false")
System.setProperty("spark.kryoserializer.buffer.mb", "8")
diff --git a/mllib/src/main/scala/spark/mllib/recommendation/MatrixFactorizationModel.scala b/mllib/src/main/scala/org/apache/spark/mllib/recommendation/MatrixFactorizationModel.scala
index 5e21717da5..ae9fe48aec 100644
--- a/mllib/src/main/scala/spark/mllib/recommendation/MatrixFactorizationModel.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/recommendation/MatrixFactorizationModel.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.mllib.recommendation
+package org.apache.spark.mllib.recommendation
-import spark.RDD
-import spark.SparkContext._
+import org.apache.spark.RDD
+import org.apache.spark.SparkContext._
import org.jblas._
diff --git a/mllib/src/main/scala/spark/mllib/regression/GeneralizedLinearAlgorithm.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/GeneralizedLinearAlgorithm.scala
index d164d415d6..06015110ac 100644
--- a/mllib/src/main/scala/spark/mllib/regression/GeneralizedLinearAlgorithm.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/GeneralizedLinearAlgorithm.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
-import spark.{Logging, RDD, SparkException}
-import spark.mllib.optimization._
+import org.apache.spark.{Logging, RDD, SparkException}
+import org.apache.spark.mllib.optimization._
import org.jblas.DoubleMatrix
@@ -52,7 +52,7 @@ abstract class GeneralizedLinearModel(val weights: Array[Double], val intercept:
* @param testData RDD representing data points to be predicted
* @return RDD[Double] where each entry contains the corresponding prediction
*/
- def predict(testData: spark.RDD[Array[Double]]): RDD[Double] = {
+ def predict(testData: RDD[Array[Double]]): RDD[Double] = {
// A small optimization to avoid serializing the entire model. Only the weightsMatrix
// and intercept is needed.
val localWeights = weightsMatrix
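The comment above refers to a common Spark idiom: copying fields into local vals so the task closure captures only those values rather than `this`, avoiding serialization of the whole model. A self-contained sketch of the pattern (plain Scala collections stand in for the RDD, so this is an illustration rather than the Spark source):

    class LinearModel(val weights: Array[Double], val intercept: Double) extends Serializable {
      def predict(testData: Seq[Array[Double]]): Seq[Double] = {
        val localWeights = weights        // captured by the closure instead of `this`
        val localIntercept = intercept
        testData.map { x =>
          x.zip(localWeights).map { case (xi, wi) => xi * wi }.sum + localIntercept
        }
      }
    }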
diff --git a/mllib/src/main/scala/spark/mllib/regression/LabeledPoint.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/LabeledPoint.scala
index 3de60482c5..63240e24dc 100644
--- a/mllib/src/main/scala/spark/mllib/regression/LabeledPoint.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/LabeledPoint.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
/**
* Class that represents the features and labels of a data point.
diff --git a/mllib/src/main/scala/spark/mllib/regression/Lasso.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/Lasso.scala
index 0f33456ef4..df3beb1959 100644
--- a/mllib/src/main/scala/spark/mllib/regression/Lasso.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/Lasso.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
-import spark.{Logging, RDD, SparkContext}
-import spark.mllib.optimization._
-import spark.mllib.util.MLUtils
+import org.apache.spark.{Logging, RDD, SparkContext}
+import org.apache.spark.mllib.optimization._
+import org.apache.spark.mllib.util.MLUtils
import org.jblas.DoubleMatrix
diff --git a/mllib/src/main/scala/spark/mllib/regression/LinearRegression.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/LinearRegression.scala
index 885ff5a30d..71f968471c 100644
--- a/mllib/src/main/scala/spark/mllib/regression/LinearRegression.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/LinearRegression.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
-import spark.{Logging, RDD, SparkContext}
-import spark.mllib.optimization._
-import spark.mllib.util.MLUtils
+import org.apache.spark.{Logging, RDD, SparkContext}
+import org.apache.spark.mllib.optimization._
+import org.apache.spark.mllib.util.MLUtils
import org.jblas.DoubleMatrix
diff --git a/mllib/src/main/scala/spark/mllib/regression/RegressionModel.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/RegressionModel.scala
index b845ba1a89..8dd325efc0 100644
--- a/mllib/src/main/scala/spark/mllib/regression/RegressionModel.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/RegressionModel.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
-import spark.RDD
+import org.apache.spark.RDD
trait RegressionModel extends Serializable {
/**
diff --git a/mllib/src/main/scala/spark/mllib/regression/RidgeRegression.scala b/mllib/src/main/scala/org/apache/spark/mllib/regression/RidgeRegression.scala
index cb1303dd99..228ab9e4e8 100644
--- a/mllib/src/main/scala/spark/mllib/regression/RidgeRegression.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/regression/RidgeRegression.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
-import spark.{Logging, RDD, SparkContext}
-import spark.mllib.optimization._
-import spark.mllib.util.MLUtils
+import org.apache.spark.{Logging, RDD, SparkContext}
+import org.apache.spark.mllib.optimization._
+import org.apache.spark.mllib.util.MLUtils
import org.jblas.DoubleMatrix
diff --git a/mllib/src/main/scala/spark/mllib/util/DataValidators.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/DataValidators.scala
index 57553accf1..7fd4623071 100644
--- a/mllib/src/main/scala/spark/mllib/util/DataValidators.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/DataValidators.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.mllib.util
+package org.apache.spark.mllib.util
-import spark.{RDD, Logging}
-import spark.mllib.regression.LabeledPoint
+import org.apache.spark.{RDD, Logging}
+import org.apache.spark.mllib.regression.LabeledPoint
/**
* A collection of methods used to validate data before applying ML algorithms.
diff --git a/mllib/src/main/scala/spark/mllib/util/KMeansDataGenerator.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/KMeansDataGenerator.scala
index 672b63f65a..6500d47183 100644
--- a/mllib/src/main/scala/spark/mllib/util/KMeansDataGenerator.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/KMeansDataGenerator.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.mllib.util
+package org.apache.spark.mllib.util
import scala.util.Random
-import spark.{RDD, SparkContext}
+import org.apache.spark.{RDD, SparkContext}
/**
* Generate test data for KMeans. This class first chooses k cluster centers
diff --git a/mllib/src/main/scala/spark/mllib/util/LinearDataGenerator.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/LinearDataGenerator.scala
index 9f48477f84..4c49d484b4 100644
--- a/mllib/src/main/scala/spark/mllib/util/LinearDataGenerator.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/LinearDataGenerator.scala
@@ -15,16 +15,16 @@
* limitations under the License.
*/
-package spark.mllib.util
+package org.apache.spark.mllib.util
import scala.collection.JavaConversions._
import scala.util.Random
import org.jblas.DoubleMatrix
-import spark.{RDD, SparkContext}
-import spark.mllib.regression.LabeledPoint
-import spark.mllib.regression.LabeledPoint
+import org.apache.spark.{RDD, SparkContext}
+import org.apache.spark.mllib.regression.LabeledPoint
+import org.apache.spark.mllib.regression.LabeledPoint
/**
* Generate sample data used for Linear Data. This class generates
diff --git a/mllib/src/main/scala/spark/mllib/util/LogisticRegressionDataGenerator.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/LogisticRegressionDataGenerator.scala
index d6402f23e2..f553298fc5 100644
--- a/mllib/src/main/scala/spark/mllib/util/LogisticRegressionDataGenerator.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/LogisticRegressionDataGenerator.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.mllib.util
+package org.apache.spark.mllib.util
import scala.util.Random
-import spark.{RDD, SparkContext}
-import spark.mllib.regression.LabeledPoint
+import org.apache.spark.{RDD, SparkContext}
+import org.apache.spark.mllib.regression.LabeledPoint
/**
* Generate test data for LogisticRegression. This class chooses positive labels
diff --git a/mllib/src/main/scala/spark/mllib/util/MFDataGenerator.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/MFDataGenerator.scala
index 88992cde0c..7eb69ae81c 100644
--- a/mllib/src/main/scala/spark/mllib/util/MFDataGenerator.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/MFDataGenerator.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.mllib.recommendation
+package org.apache.spark.mllib.recommendation
import scala.util.Random
import org.jblas.DoubleMatrix
-import spark.{RDD, SparkContext}
-import spark.mllib.util.MLUtils
+import org.apache.spark.{RDD, SparkContext}
+import org.apache.spark.mllib.util.MLUtils
/**
* Generate RDD(s) containing data for Matrix Factorization.
@@ -110,4 +110,4 @@ object MFDataGenerator{
sc.stop()
}
-} \ No newline at end of file
+}
diff --git a/mllib/src/main/scala/spark/mllib/util/MLUtils.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/MLUtils.scala
index a8e6ae9953..0aeafbe23c 100644
--- a/mllib/src/main/scala/spark/mllib/util/MLUtils.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/MLUtils.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.mllib.util
+package org.apache.spark.mllib.util
-import spark.{RDD, SparkContext}
-import spark.SparkContext._
+import org.apache.spark.{RDD, SparkContext}
+import org.apache.spark.SparkContext._
import org.jblas.DoubleMatrix
-import spark.mllib.regression.LabeledPoint
+import org.apache.spark.mllib.regression.LabeledPoint
/**
* Helper methods to load, save and pre-process data used in ML Lib.
diff --git a/mllib/src/main/scala/spark/mllib/util/SVMDataGenerator.scala b/mllib/src/main/scala/org/apache/spark/mllib/util/SVMDataGenerator.scala
index eff456cad6..d3f191b05b 100644
--- a/mllib/src/main/scala/spark/mllib/util/SVMDataGenerator.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/util/SVMDataGenerator.scala
@@ -1,11 +1,11 @@
-package spark.mllib.util
+package org.apache.spark.mllib.util
import scala.util.Random
-import spark.{RDD, SparkContext}
+import org.apache.spark.{RDD, SparkContext}
import org.jblas.DoubleMatrix
-import spark.mllib.regression.LabeledPoint
+import org.apache.spark.mllib.regression.LabeledPoint
/**
* Generate sample data used for SVM. This class generates uniform random values
diff --git a/mllib/src/test/java/spark/mllib/classification/JavaLogisticRegressionSuite.java b/mllib/src/test/java/org/apache/spark/mllib/classification/JavaLogisticRegressionSuite.java
index e0ebd45cd8..e18e3bc6a8 100644
--- a/mllib/src/test/java/spark/mllib/classification/JavaLogisticRegressionSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/classification/JavaLogisticRegressionSuite.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.classification;
+package org.apache.spark.mllib.classification;
import java.io.Serializable;
import java.util.List;
@@ -25,10 +25,10 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
-import spark.mllib.regression.LabeledPoint;
+import org.apache.spark.mllib.regression.LabeledPoint;
public class JavaLogisticRegressionSuite implements Serializable {
private transient JavaSparkContext sc;
diff --git a/mllib/src/test/java/spark/mllib/classification/JavaSVMSuite.java b/mllib/src/test/java/org/apache/spark/mllib/classification/JavaSVMSuite.java
index 7881b3c38f..117e5eaa8b 100644
--- a/mllib/src/test/java/spark/mllib/classification/JavaSVMSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/classification/JavaSVMSuite.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.classification;
+package org.apache.spark.mllib.classification;
import java.io.Serializable;
@@ -26,10 +26,10 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
-import spark.mllib.regression.LabeledPoint;
+import org.apache.spark.mllib.regression.LabeledPoint;
public class JavaSVMSuite implements Serializable {
private transient JavaSparkContext sc;
diff --git a/mllib/src/test/java/spark/mllib/clustering/JavaKMeansSuite.java b/mllib/src/test/java/org/apache/spark/mllib/clustering/JavaKMeansSuite.java
index 3f2d82bfb4..32d3934ac1 100644
--- a/mllib/src/test/java/spark/mllib/clustering/JavaKMeansSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/clustering/JavaKMeansSuite.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.clustering;
+package org.apache.spark.mllib.clustering;
import java.io.Serializable;
import java.util.ArrayList;
@@ -26,8 +26,8 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
public class JavaKMeansSuite implements Serializable {
private transient JavaSparkContext sc;
diff --git a/mllib/src/test/java/spark/mllib/recommendation/JavaALSSuite.java b/mllib/src/test/java/org/apache/spark/mllib/recommendation/JavaALSSuite.java
index 7993629a6d..3323f6cee2 100644
--- a/mllib/src/test/java/spark/mllib/recommendation/JavaALSSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/recommendation/JavaALSSuite.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.recommendation;
+package org.apache.spark.mllib.recommendation;
import java.io.Serializable;
import java.util.List;
@@ -27,8 +27,8 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
import org.jblas.DoubleMatrix;
diff --git a/mllib/src/test/java/spark/mllib/regression/JavaLassoSuite.java b/mllib/src/test/java/org/apache/spark/mllib/regression/JavaLassoSuite.java
index 5863140baf..f44b25cd44 100644
--- a/mllib/src/test/java/spark/mllib/regression/JavaLassoSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/regression/JavaLassoSuite.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.regression;
+package org.apache.spark.mllib.regression;
import java.io.Serializable;
import java.util.List;
@@ -25,9 +25,9 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.mllib.util.LinearDataGenerator;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.mllib.util.LinearDataGenerator;
public class JavaLassoSuite implements Serializable {
private transient JavaSparkContext sc;
diff --git a/mllib/src/test/java/spark/mllib/regression/JavaLinearRegressionSuite.java b/mllib/src/test/java/org/apache/spark/mllib/regression/JavaLinearRegressionSuite.java
index 50716c7861..5a4410a632 100644
--- a/mllib/src/test/java/spark/mllib/regression/JavaLinearRegressionSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/regression/JavaLinearRegressionSuite.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.regression;
+package org.apache.spark.mllib.regression;
import java.io.Serializable;
import java.util.List;
@@ -25,9 +25,9 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.mllib.util.LinearDataGenerator;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.mllib.util.LinearDataGenerator;
public class JavaLinearRegressionSuite implements Serializable {
private transient JavaSparkContext sc;
diff --git a/mllib/src/test/java/spark/mllib/regression/JavaRidgeRegressionSuite.java b/mllib/src/test/java/org/apache/spark/mllib/regression/JavaRidgeRegressionSuite.java
index 2c0aabad30..2fdd5fc8fd 100644
--- a/mllib/src/test/java/spark/mllib/regression/JavaRidgeRegressionSuite.java
+++ b/mllib/src/test/java/org/apache/spark/mllib/regression/JavaRidgeRegressionSuite.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.regression;
+package org.apache.spark.mllib.regression;
import java.io.Serializable;
import java.util.List;
@@ -27,9 +27,9 @@ import org.junit.Test;
import org.jblas.DoubleMatrix;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaSparkContext;
-import spark.mllib.util.LinearDataGenerator;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.mllib.util.LinearDataGenerator;
public class JavaRidgeRegressionSuite implements Serializable {
private transient JavaSparkContext sc;
diff --git a/mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
index bd87c528c3..34c67294e9 100644
--- a/mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.classification
+package org.apache.spark.mllib.classification
import scala.util.Random
import scala.collection.JavaConversions._
@@ -24,8 +24,8 @@ import org.scalatest.BeforeAndAfterAll
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
-import spark.SparkContext
-import spark.mllib.regression._
+import org.apache.spark.SparkContext
+import org.apache.spark.mllib.regression._
object LogisticRegressionSuite {
diff --git a/mllib/src/test/scala/spark/mllib/classification/SVMSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/classification/SVMSuite.scala
index 894ae458ad..6a957e3ddc 100644
--- a/mllib/src/test/scala/spark/mllib/classification/SVMSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/classification/SVMSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.classification
+package org.apache.spark.mllib.classification
import scala.util.Random
import scala.math.signum
@@ -24,11 +24,11 @@ import scala.collection.JavaConversions._
import org.scalatest.BeforeAndAfterAll
import org.scalatest.FunSuite
-import spark.SparkContext
-import spark.mllib.regression._
-
import org.jblas.DoubleMatrix
+import org.apache.spark.{SparkException, SparkContext}
+import org.apache.spark.mllib.regression._
+
object SVMSuite {
def generateSVMInputAsList(
@@ -159,7 +159,7 @@ class SVMSuite extends FunSuite with BeforeAndAfterAll {
}
}
- intercept[spark.SparkException] {
+ intercept[SparkException] {
val model = SVMWithSGD.train(testRDDInvalid, 100)
}
diff --git a/mllib/src/test/scala/spark/mllib/clustering/KMeansSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/clustering/KMeansSuite.scala
index d5d95c8639..94245f6027 100644
--- a/mllib/src/test/scala/spark/mllib/clustering/KMeansSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/clustering/KMeansSuite.scala
@@ -15,15 +15,15 @@
* limitations under the License.
*/
-package spark.mllib.clustering
+package org.apache.spark.mllib.clustering
import scala.util.Random
import org.scalatest.BeforeAndAfterAll
import org.scalatest.FunSuite
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
import org.jblas._
diff --git a/mllib/src/test/scala/spark/mllib/recommendation/ALSSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/recommendation/ALSSuite.scala
index 15a60efda6..347ef238f4 100644
--- a/mllib/src/test/scala/spark/mllib/recommendation/ALSSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/recommendation/ALSSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.recommendation
+package org.apache.spark.mllib.recommendation
import scala.collection.JavaConversions._
import scala.util.Random
@@ -23,8 +23,8 @@ import scala.util.Random
import org.scalatest.BeforeAndAfterAll
import org.scalatest.FunSuite
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
import org.jblas._
diff --git a/mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/regression/LassoSuite.scala
index 622dbbab7f..db980c7bae 100644
--- a/mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/regression/LassoSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
import scala.collection.JavaConversions._
import scala.util.Random
@@ -23,8 +23,8 @@ import scala.util.Random
import org.scalatest.BeforeAndAfterAll
import org.scalatest.FunSuite
-import spark.SparkContext
-import spark.mllib.util.LinearDataGenerator
+import org.apache.spark.SparkContext
+import org.apache.spark.mllib.util.LinearDataGenerator
class LassoSuite extends FunSuite with BeforeAndAfterAll {
diff --git a/mllib/src/test/scala/spark/mllib/regression/LinearRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/regression/LinearRegressionSuite.scala
index acc48a3283..ef500c704c 100644
--- a/mllib/src/test/scala/spark/mllib/regression/LinearRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/regression/LinearRegressionSuite.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
import org.scalatest.BeforeAndAfterAll
import org.scalatest.FunSuite
-import spark.SparkContext
-import spark.SparkContext._
-import spark.mllib.util.LinearDataGenerator
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.mllib.util.LinearDataGenerator
class LinearRegressionSuite extends FunSuite with BeforeAndAfterAll {
@transient private var sc: SparkContext = _
diff --git a/mllib/src/test/scala/spark/mllib/regression/RidgeRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/regression/RidgeRegressionSuite.scala
index c482035706..c18092d804 100644
--- a/mllib/src/test/scala/spark/mllib/regression/RidgeRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/regression/RidgeRegressionSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.mllib.regression
+package org.apache.spark.mllib.regression
import scala.collection.JavaConversions._
import scala.util.Random
@@ -24,9 +24,9 @@ import org.jblas.DoubleMatrix
import org.scalatest.BeforeAndAfterAll
import org.scalatest.FunSuite
-import spark.SparkContext
-import spark.SparkContext._
-import spark.mllib.util.LinearDataGenerator
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.mllib.util.LinearDataGenerator
class RidgeRegressionSuite extends FunSuite with BeforeAndAfterAll {
@transient private var sc: SparkContext = _
diff --git a/pom.xml b/pom.xml
index e2fd54a966..9230611eae 100644
--- a/pom.xml
+++ b/pom.xml
@@ -18,22 +18,22 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<packaging>pom</packaging>
<name>Spark Project Parent POM</name>
- <url>http://spark-project.org/</url>
+ <url>http://spark.incubator.apache.org/</url>
<licenses>
<license>
- <name>BSD License</name>
- <url>https://github.com/mesos/spark/blob/master/LICENSE</url>
+ <name>Apache 2.0 License</name>
+ <url>http://www.apache.org/licenses/LICENSE-2.0.html</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
- <connection>scm:git:git@github.com:mesos/spark.git</connection>
- <url>scm:git:git@github.com:mesos/spark.git</url>
+ <connection>scm:git:git@github.com:apache/incubator-spark.git</connection>
+ <url>scm:git:git@github.com:apache/incubator-spark.git</url>
</scm>
<developers>
<developer>
@@ -46,7 +46,7 @@
</developer>
</developers>
<issueManagement>
- <system>github</system>
+ <system>JIRA</system>
<url>https://spark-project.atlassian.net/browse/SPARK</url>
</issueManagement>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 2e26812671..18e86d2cae 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -74,7 +74,7 @@ object SparkBuild extends Build {
core, repl, examples, bagel, streaming, mllib, tools, assemblyProj) ++ maybeYarnRef
def sharedSettings = Defaults.defaultSettings ++ Seq(
- organization := "org.spark-project",
+ organization := "org.apache.spark",
version := "0.8.0-SNAPSHOT",
scalaVersion := "2.9.3",
scalacOptions := Seq("-unchecked", "-optimize", "-deprecation"),
@@ -103,7 +103,7 @@ object SparkBuild extends Build {
//useGpg in Global := true,
pomExtra := (
- <url>http://spark-project.org/</url>
+ <url>http://spark.incubator.apache.org/</url>
<licenses>
<license>
<name>Apache 2.0 License</name>
@@ -112,8 +112,8 @@ object SparkBuild extends Build {
</license>
</licenses>
<scm>
- <connection>scm:git:git@github.com:mesos/spark.git</connection>
- <url>scm:git:git@github.com:mesos/spark.git</url>
+ <connection>scm:git:git@github.com:apache/incubator-spark.git</connection>
+ <url>scm:git:git@github.com:apache/incubator-spark.git</url>
</scm>
<developers>
<developer>
@@ -125,6 +125,10 @@ object SparkBuild extends Build {
<organizationUrl>http://www.cs.berkeley.edu/</organizationUrl>
</developer>
</developers>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://spark-project.atlassian.net/browse/SPARK</url>
+ </issueManagement>
),
/*
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 2803ce90f3..906e9221a1 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -114,9 +114,9 @@ class SparkContext(object):
self.addPyFile(path)
# Create a temporary directory inside spark.local.dir:
- local_dir = self._jvm.spark.Utils.getLocalDir()
+ local_dir = self._jvm.org.apache.spark.Utils.getLocalDir()
self._temp_dir = \
- self._jvm.spark.Utils.createTempDir(local_dir).getAbsolutePath()
+ self._jvm.org.apache.spark.Utils.createTempDir(local_dir).getAbsolutePath()
@property
def defaultParallelism(self):
diff --git a/python/pyspark/files.py b/python/pyspark/files.py
index 89bcbcfe06..57ee14eeb7 100644
--- a/python/pyspark/files.py
+++ b/python/pyspark/files.py
@@ -52,4 +52,4 @@ class SparkFiles(object):
return cls._root_directory
else:
# This will have to change if we support multiple SparkContexts:
- return cls._sc._jvm.spark.SparkFiles.getRootDirectory()
+ return cls._sc._jvm.org.apache.spark.SparkFiles.getRootDirectory()
diff --git a/python/pyspark/java_gateway.py b/python/pyspark/java_gateway.py
index 3ccf062c86..26fbe0f080 100644
--- a/python/pyspark/java_gateway.py
+++ b/python/pyspark/java_gateway.py
@@ -53,7 +53,7 @@ def launch_gateway():
# Connect to the gateway
gateway = JavaGateway(GatewayClient(port=port), auto_convert=False)
# Import the classes used by PySpark
- java_import(gateway.jvm, "spark.api.java.*")
- java_import(gateway.jvm, "spark.api.python.*")
+ java_import(gateway.jvm, "org.apache.spark.api.java.*")
+ java_import(gateway.jvm, "org.apache.spark.api.python.*")
java_import(gateway.jvm, "scala.Tuple2")
return gateway
diff --git a/repl-bin/pom.xml b/repl-bin/pom.xml
index 919e35f240..6a1b09e8df 100644
--- a/repl-bin/pom.xml
+++ b/repl-bin/pom.xml
@@ -19,13 +19,13 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-repl-bin</artifactId>
<packaging>pom</packaging>
<name>Spark Project REPL binary packaging</name>
@@ -39,18 +39,18 @@
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-bagel</artifactId>
<version>${project.version}</version>
<scope>runtime</scope>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-repl</artifactId>
<version>${project.version}</version>
<scope>runtime</scope>
@@ -109,7 +109,7 @@
<id>hadoop2-yarn</id>
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-yarn</artifactId>
<version>${project.version}</version>
</dependency>
diff --git a/repl/pom.xml b/repl/pom.xml
index f800664cff..f6276f1895 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -19,13 +19,13 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-repl</artifactId>
<packaging>jar</packaging>
<name>Spark Project REPL</name>
@@ -38,18 +38,18 @@
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-bagel</artifactId>
<version>${project.version}</version>
<scope>runtime</scope>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-mllib</artifactId>
<version>${project.version}</version>
<scope>runtime</scope>
@@ -136,7 +136,7 @@
<id>hadoop2-yarn</id>
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-yarn</artifactId>
<version>${project.version}</version>
</dependency>
diff --git a/repl/src/main/scala/spark/repl/ExecutorClassLoader.scala b/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
index 274bc585db..3e171849e3 100644
--- a/repl/src/main/scala/spark/repl/ExecutorClassLoader.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/ExecutorClassLoader.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.repl
+package org.apache.spark.repl
import java.io.{ByteArrayOutputStream, InputStream}
import java.net.{URI, URL, URLClassLoader, URLEncoder}
diff --git a/repl/src/main/scala/spark/repl/Main.scala b/repl/src/main/scala/org/apache/spark/repl/Main.scala
index d824d62fd1..17e149f8ab 100644
--- a/repl/src/main/scala/spark/repl/Main.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/Main.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.repl
+package org.apache.spark.repl
import scala.collection.mutable.Set
diff --git a/repl/src/main/scala/spark/repl/SparkHelper.scala b/repl/src/main/scala/org/apache/spark/repl/SparkHelper.scala
index d8fb7191b4..d8fb7191b4 100644
--- a/repl/src/main/scala/spark/repl/SparkHelper.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkHelper.scala
diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 0cecbd71ad..193ccb48ee 100644
--- a/repl/src/main/scala/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -3,7 +3,7 @@
* @author Alexander Spoon
*/
-package spark.repl
+package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._
@@ -22,8 +22,8 @@ import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
import interpreter._
import io.{ File, Sources }
-import spark.Logging
-import spark.SparkContext
+import org.apache.spark.Logging
+import org.apache.spark.SparkContext
/** The Scala interactive shell. It provides a read-eval-print loop
* around the Interpreter class.
@@ -816,13 +816,13 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
def initializeSpark() {
intp.beQuietDuring {
command("""
- spark.repl.Main.interp.out.println("Creating SparkContext...");
- spark.repl.Main.interp.out.flush();
- @transient val sc = spark.repl.Main.interp.createSparkContext();
- spark.repl.Main.interp.out.println("Spark context available as sc.");
- spark.repl.Main.interp.out.flush();
+ org.apache.spark.repl.Main.interp.out.println("Creating SparkContext...");
+ org.apache.spark.repl.Main.interp.out.flush();
+ @transient val sc = org.apache.spark.repl.Main.interp.createSparkContext();
+ org.apache.spark.repl.Main.interp.out.println("Spark context available as sc.");
+ org.apache.spark.repl.Main.interp.out.flush();
""")
- command("import spark.SparkContext._")
+ command("import org.apache.spark.SparkContext._")
}
echo("Type in expressions to have them evaluated.")
echo("Type :help for more information.")
diff --git a/repl/src/main/scala/spark/repl/SparkIMain.scala b/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
index 43b6a6c950..7e244e48a2 100644
--- a/repl/src/main/scala/spark/repl/SparkIMain.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkIMain.scala
@@ -3,7 +3,7 @@
* @author Martin Odersky
*/
-package spark.repl
+package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._
@@ -27,9 +27,9 @@ import scala.util.control.Exception.{ ultimately }
import scala.reflect.NameTransformer
import SparkIMain._
-import spark.HttpServer
-import spark.Utils
-import spark.SparkEnv
+import org.apache.spark.HttpServer
+import org.apache.spark.Utils
+import org.apache.spark.SparkEnv
/** An interpreter for Scala code.
*
@@ -883,7 +883,7 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
val execution = lineManager.set(originalLine) {
// MATEI: set the right SparkEnv for our SparkContext, because
// this execution will happen in a separate thread
- val sc = spark.repl.Main.interp.sparkContext
+ val sc = org.apache.spark.repl.Main.interp.sparkContext
if (sc != null && sc.env != null)
SparkEnv.set(sc.env)
// Execute the line
diff --git a/repl/src/main/scala/spark/repl/SparkISettings.scala b/repl/src/main/scala/org/apache/spark/repl/SparkISettings.scala
index 8ebb01d146..605b7b259b 100644
--- a/repl/src/main/scala/spark/repl/SparkISettings.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkISettings.scala
@@ -3,7 +3,7 @@
* @author Alexander Spoon
*/
-package spark.repl
+package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._
diff --git a/repl/src/main/scala/spark/repl/SparkImports.scala b/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
index 5caf5ca51a..41a1731d60 100644
--- a/repl/src/main/scala/spark/repl/SparkImports.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkImports.scala
@@ -3,7 +3,7 @@
* @author Paul Phillips
*/
-package spark.repl
+package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._
diff --git a/repl/src/main/scala/spark/repl/SparkJLineCompletion.scala b/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala
index 0069d8b2f4..fdc172d753 100644
--- a/repl/src/main/scala/spark/repl/SparkJLineCompletion.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala
@@ -3,7 +3,7 @@
* @author Paul Phillips
*/
-package spark.repl
+package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._
diff --git a/repl/src/main/scala/spark/repl/SparkJLineReader.scala b/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala
index ef6b6e092e..d9e1de105c 100644
--- a/repl/src/main/scala/spark/repl/SparkJLineReader.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala
@@ -3,7 +3,7 @@
* @author Stepan Koltsov
*/
-package spark.repl
+package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._
diff --git a/repl/src/main/scala/spark/repl/SparkMemberHandlers.scala b/repl/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala
index 2980dfcd76..a3409bf665 100644
--- a/repl/src/main/scala/spark/repl/SparkMemberHandlers.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkMemberHandlers.scala
@@ -3,7 +3,7 @@
* @author Martin Odersky
*/
-package spark.repl
+package org.apache.spark.repl
import scala.tools.nsc._
import scala.tools.nsc.interpreter._
diff --git a/repl/src/test/scala/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 80ae605558..8f9b632c0e 100644
--- a/repl/src/test/scala/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.repl
+package org.apache.spark.repl
import java.io._
import java.net.URLClassLoader
@@ -41,10 +41,10 @@ class ReplSuite extends FunSuite {
}
}
val interp = new SparkILoop(in, new PrintWriter(out), master)
- spark.repl.Main.interp = interp
+ org.apache.spark.repl.Main.interp = interp
val separator = System.getProperty("path.separator")
interp.process(Array("-classpath", paths.mkString(separator)))
- spark.repl.Main.interp = null
+ org.apache.spark.repl.Main.interp = null
if (interp.sparkContext != null) {
interp.sparkContext.stop()
}
diff --git a/spark-executor b/spark-executor
index 63692bd46c..2c07c54843 100755
--- a/spark-executor
+++ b/spark-executor
@@ -19,4 +19,4 @@
FWDIR="`dirname $0`"
echo "Running spark-executor with framework dir = $FWDIR"
-exec $FWDIR/spark-class spark.executor.MesosExecutorBackend
+exec $FWDIR/spark-class org.apache.spark.executor.MesosExecutorBackend
diff --git a/spark-shell b/spark-shell
index 4d379c5cfb..9608bd3f30 100755
--- a/spark-shell
+++ b/spark-shell
@@ -79,7 +79,7 @@ if [[ ! $? ]]; then
saved_stty=""
fi
-$FWDIR/spark-class $OPTIONS spark.repl.Main "$@"
+$FWDIR/spark-class $OPTIONS org.apache.spark.repl.Main "$@"
# record the exit status lest it be overwritten:
# then reenable echo and propagate the code.
diff --git a/spark-shell.cmd b/spark-shell.cmd
index ec65eabb74..b9b4d4bfb2 100644
--- a/spark-shell.cmd
+++ b/spark-shell.cmd
@@ -19,4 +19,4 @@ rem
set FWDIR=%~dp0
set SPARK_LAUNCH_WITH_SCALA=1
-cmd /V /E /C %FWDIR%run2.cmd spark.repl.Main %*
+cmd /V /E /C %FWDIR%run2.cmd org.apache.spark.repl.Main %*
diff --git a/streaming/pom.xml b/streaming/pom.xml
index 5c0582d6fb..c8946313ee 100644
--- a/streaming/pom.xml
+++ b/streaming/pom.xml
@@ -19,13 +19,13 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-streaming</artifactId>
<packaging>jar</packaging>
<name>Spark Project Streaming</name>
@@ -41,7 +41,7 @@
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
</dependency>
diff --git a/streaming/src/main/scala/spark/streaming/Checkpoint.scala b/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
index 070d930b5e..2d8f072624 100644
--- a/streaming/src/main/scala/spark/streaming/Checkpoint.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/Checkpoint.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
import java.io._
import java.util.concurrent.Executors
@@ -24,8 +24,8 @@ import java.util.concurrent.RejectedExecutionException
import org.apache.hadoop.fs.Path
import org.apache.hadoop.conf.Configuration
-import spark.Logging
-import spark.io.CompressionCodec
+import org.apache.spark.Logging
+import org.apache.spark.io.CompressionCodec
private[streaming]
diff --git a/streaming/src/main/scala/spark/streaming/DStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/DStream.scala
index 684d3abb56..362247cc38 100644
--- a/streaming/src/main/scala/spark/streaming/DStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/DStream.scala
@@ -15,14 +15,16 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
-import spark.streaming.dstream._
+import org.apache.spark.streaming.dstream._
import StreamingContext._
+import org.apache.spark.util.MetadataCleaner
+
//import Time._
-import spark.{RDD, Logging}
-import spark.storage.StorageLevel
+import org.apache.spark.{RDD, Logging}
+import org.apache.spark.storage.StorageLevel
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
@@ -34,7 +36,7 @@ import org.apache.hadoop.conf.Configuration
/**
* A Discretized Stream (DStream), the basic abstraction in Spark Streaming, is a continuous
- * sequence of RDDs (of the same type) representing a continuous stream of data (see [[spark.RDD]]
+ * sequence of RDDs (of the same type) representing a continuous stream of data (see [[org.apache.spark.RDD]]
* for more details on RDDs). DStreams can either be created from live data (such as data from
* HDFS, Kafka or Flume) or they can be generated by transforming existing DStreams using operations
* such as `map`, `window` and `reduceByKeyAndWindow`. While a Spark Streaming program is running, each
@@ -42,7 +44,7 @@ import org.apache.hadoop.conf.Configuration
* by a parent DStream.
*
* This class contains the basic operations available on all DStreams, such as `map`, `filter` and
- * `window`. In addition, [[spark.streaming.PairDStreamFunctions]] contains operations available
+ * `window`. In addition, [[org.apache.spark.streaming.PairDStreamFunctions]] contains operations available
* only on DStreams of key-value pairs, such as `groupByKeyAndWindow` and `join`. These operations
* are automatically available on any DStream of the right type (e.g., DStream[(Int, Int)]) through
* implicit conversions when `spark.streaming.StreamingContext._` is imported.
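A minimal word count against the renamed streaming API, to make the scaladoc above concrete (a sketch assuming the 0.8-era constructor and a hypothetical socket source):

    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.StreamingContext._  // pair-DStream implicits

    object StreamingWordCount {
      def main(args: Array[String]) {
        val ssc = new StreamingContext("local[2]", "StreamingWordCount", Seconds(1))
        val lines = ssc.socketTextStream("localhost", 9999)  // hypothetical source
        lines.flatMap(_.split(" "))
          .map(word => (word, 1))
          .reduceByKey(_ + _)
          .print()
        ssc.start()
      }
    }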
@@ -209,7 +211,7 @@ abstract class DStream[T: ClassManifest] (
checkpointDuration + "). Please set it to higher than " + checkpointDuration + "."
)
- val metadataCleanerDelay = spark.util.MetadataCleaner.getDelaySeconds
+ val metadataCleanerDelay = MetadataCleaner.getDelaySeconds
logInfo("metadataCleanupDelay = " + metadataCleanerDelay)
assert(
metadataCleanerDelay < 0 || rememberDuration.milliseconds < metadataCleanerDelay * 1000,
diff --git a/streaming/src/main/scala/spark/streaming/DStreamCheckpointData.scala b/streaming/src/main/scala/org/apache/spark/streaming/DStreamCheckpointData.scala
index 399ca1c63d..58a0da2870 100644
--- a/streaming/src/main/scala/spark/streaming/DStreamCheckpointData.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/DStreamCheckpointData.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
import org.apache.hadoop.fs.Path
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.conf.Configuration
import collection.mutable.HashMap
-import spark.Logging
+import org.apache.spark.Logging
diff --git a/streaming/src/main/scala/spark/streaming/DStreamGraph.scala b/streaming/src/main/scala/org/apache/spark/streaming/DStreamGraph.scala
index c09a332d44..b9a58fded6 100644
--- a/streaming/src/main/scala/spark/streaming/DStreamGraph.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/DStreamGraph.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
import dstream.InputDStream
import java.io.{ObjectInputStream, IOException, ObjectOutputStream}
import collection.mutable.ArrayBuffer
-import spark.Logging
+import org.apache.spark.Logging
final private[streaming] class DStreamGraph extends Serializable with Logging {
initLogging()
diff --git a/streaming/src/main/scala/spark/streaming/Duration.scala b/streaming/src/main/scala/org/apache/spark/streaming/Duration.scala
index 12a14e233d..290ad37812 100644
--- a/streaming/src/main/scala/spark/streaming/Duration.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/Duration.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
-import spark.Utils
+import org.apache.spark.Utils
case class Duration (private val millis: Long) {
@@ -57,7 +57,7 @@ case class Duration (private val millis: Long) {
}
/**
- * Helper object that creates instance of [[spark.streaming.Duration]] representing
+ * Helper object that creates instance of [[org.apache.spark.streaming.Duration]] representing
* a given number of milliseconds.
*/
object Milliseconds {
@@ -65,7 +65,7 @@ object Milliseconds {
}
/**
- * Helper object that creates instance of [[spark.streaming.Duration]] representing
+ * Helper object that creates instance of [[org.apache.spark.streaming.Duration]] representing
* a given number of seconds.
*/
object Seconds {
@@ -73,7 +73,7 @@ object Seconds {
}
/**
- * Helper object that creates instance of [[spark.streaming.Duration]] representing
+ * Helper object that creates instance of [[org.apache.spark.streaming.Duration]] representing
* a given number of minutes.
*/
object Minutes {
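These helpers are thin factories over `Duration`; assuming the obvious millisecond arithmetic:

    val batch: Duration = Seconds(5)    // presumably equivalent to Duration(5000)
    val window: Duration = Minutes(1)   // presumably equivalent to Duration(60000)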
diff --git a/streaming/src/main/scala/spark/streaming/Interval.scala b/streaming/src/main/scala/org/apache/spark/streaming/Interval.scala
index b30cd969e9..04c994c136 100644
--- a/streaming/src/main/scala/spark/streaming/Interval.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/Interval.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
private[streaming]
class Interval(val beginTime: Time, val endTime: Time) {
diff --git a/streaming/src/main/scala/spark/streaming/Job.scala b/streaming/src/main/scala/org/apache/spark/streaming/Job.scala
index ceb3f92b65..2128b7c7a6 100644
--- a/streaming/src/main/scala/spark/streaming/Job.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/Job.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
import java.util.concurrent.atomic.AtomicLong
diff --git a/streaming/src/main/scala/spark/streaming/JobManager.scala b/streaming/src/main/scala/org/apache/spark/streaming/JobManager.scala
index a31230689f..5233129506 100644
--- a/streaming/src/main/scala/spark/streaming/JobManager.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/JobManager.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
-import spark.Logging
-import spark.SparkEnv
+import org.apache.spark.Logging
+import org.apache.spark.SparkEnv
import java.util.concurrent.Executors
import collection.mutable.HashMap
import collection.mutable.ArrayBuffer
diff --git a/streaming/src/main/scala/spark/streaming/NetworkInputTracker.scala b/streaming/src/main/scala/org/apache/spark/streaming/NetworkInputTracker.scala
index d4cf2e568c..aae79a4e6f 100644
--- a/streaming/src/main/scala/spark/streaming/NetworkInputTracker.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/NetworkInputTracker.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
-import spark.streaming.dstream.{NetworkInputDStream, NetworkReceiver}
-import spark.streaming.dstream.{StopReceiver, ReportBlock, ReportError}
-import spark.Logging
-import spark.SparkEnv
-import spark.SparkContext._
+import org.apache.spark.streaming.dstream.{NetworkInputDStream, NetworkReceiver}
+import org.apache.spark.streaming.dstream.{StopReceiver, ReportBlock, ReportError}
+import org.apache.spark.Logging
+import org.apache.spark.SparkEnv
+import org.apache.spark.SparkContext._
import scala.collection.mutable.HashMap
import scala.collection.mutable.Queue
diff --git a/streaming/src/main/scala/spark/streaming/PairDStreamFunctions.scala b/streaming/src/main/scala/org/apache/spark/streaming/PairDStreamFunctions.scala
index 47bf07bee1..d8a7381e87 100644
--- a/streaming/src/main/scala/spark/streaming/PairDStreamFunctions.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/PairDStreamFunctions.scala
@@ -15,16 +15,16 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
-import spark.streaming.StreamingContext._
-import spark.streaming.dstream.{ReducedWindowedDStream, StateDStream}
-import spark.streaming.dstream.{CoGroupedDStream, ShuffledDStream}
-import spark.streaming.dstream.{MapValuedDStream, FlatMapValuedDStream}
+import org.apache.spark.streaming.StreamingContext._
+import org.apache.spark.streaming.dstream.{ReducedWindowedDStream, StateDStream}
+import org.apache.spark.streaming.dstream.{CoGroupedDStream, ShuffledDStream}
+import org.apache.spark.streaming.dstream.{MapValuedDStream, FlatMapValuedDStream}
-import spark.{Manifests, RDD, Partitioner, HashPartitioner}
-import spark.SparkContext._
-import spark.storage.StorageLevel
+import org.apache.spark.{Manifests, RDD, Partitioner, HashPartitioner}
+import org.apache.spark.SparkContext._
+import org.apache.spark.storage.StorageLevel
import scala.collection.mutable.ArrayBuffer
@@ -60,7 +60,7 @@ extends Serializable {
}
/**
- * Return a new DStream by applying `groupByKey` on each RDD. The supplied [[spark.Partitioner]]
+ * Return a new DStream by applying `groupByKey` on each RDD. The supplied [[org.apache.spark.Partitioner]]
* is used to control the partitioning of each RDD.
*/
def groupByKey(partitioner: Partitioner): DStream[(K, Seq[V])] = {
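A usage sketch of the partitioned variant (assuming `pairs: DStream[(String, Int)]` with the `StreamingContext._` implicits in scope; `HashPartitioner` is the core class renamed elsewhere in this commit):

    import org.apache.spark.HashPartitioner
    val grouped = pairs.groupByKey(new HashPartitioner(8))  // 8 shuffle partitions per RDD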
@@ -91,7 +91,7 @@ extends Serializable {
/**
* Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are
- * merged using the supplied reduce function. [[spark.Partitioner]] is used to control the
+ * merged using the supplied reduce function. [[org.apache.spark.Partitioner]] is used to control the
* partitioning of each RDD.
*/
def reduceByKey(reduceFunc: (V, V) => V, partitioner: Partitioner): DStream[(K, V)] = {
@@ -101,7 +101,7 @@ extends Serializable {
/**
* Combine elements of each key in DStream's RDDs using custom functions. This is similar to the
- * combineByKey for RDDs. Please refer to combineByKey in [[spark.PairRDDFunctions]] for more
+ * combineByKey for RDDs. Please refer to combineByKey in [[org.apache.spark.PairRDDFunctions]] for more
* information.
*/
def combineByKey[C: ClassManifest](
@@ -360,7 +360,7 @@ extends Serializable {
/**
* Create a new "state" DStream where the state for each key is updated by applying
* the given function on the previous state of the key and the new values of the key.
- * [[spark.Partitioner]] is used to control the partitioning of each RDD.
+ * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD.
* @param updateFunc State update function. If `this` function returns None, then
* corresponding state key-value pair will be eliminated.
* @param partitioner Partitioner for controlling the partitioning of each RDD in the new DStream.
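A sketch of the update function contract described above, with types assumed to follow the `(Seq[V], Option[S]) => Option[S]` shape the scaladoc implies; returning None removes the key's state:

    val updateFunc = (newValues: Seq[Int], state: Option[Int]) => {
      val sum = state.getOrElse(0) + newValues.sum
      if (sum == 0) None else Some(sum)  // None eliminates this key's state entry
    }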
@@ -379,7 +379,7 @@ extends Serializable {
/**
* Return a new "state" DStream where the state for each key is updated by applying
* the given function on the previous state of the key and the new values of each key.
- * [[spark.Paxrtitioner]] is used to control the partitioning of each RDD.
+ * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD.
* @param updateFunc State update function. If `this` function returns None, then
* corresponding state key-value pair will be eliminated. Note that
* this function may generate a different tuple with a different key
diff --git a/streaming/src/main/scala/spark/streaming/Scheduler.scala b/streaming/src/main/scala/org/apache/spark/streaming/Scheduler.scala
index 252cc2a303..ed892e33e6 100644
--- a/streaming/src/main/scala/spark/streaming/Scheduler.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/Scheduler.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
import util.{ManualClock, RecurringTimer, Clock}
-import spark.SparkEnv
-import spark.Logging
+import org.apache.spark.SparkEnv
+import org.apache.spark.Logging
private[streaming]
class Scheduler(ssc: StreamingContext) extends Logging {
@@ -34,7 +34,8 @@ class Scheduler(ssc: StreamingContext) extends Logging {
null
}
- val clockClass = System.getProperty("spark.streaming.clock", "spark.streaming.util.SystemClock")
+ val clockClass = System.getProperty(
+ "spark.streaming.clock", "org.apache.spark.streaming.util.SystemClock")
val clock = Class.forName(clockClass).newInstance().asInstanceOf[Clock]
val timer = new RecurringTimer(clock, ssc.graph.batchDuration.milliseconds,
longTime => generateJobs(new Time(longTime)))
diff --git a/streaming/src/main/scala/spark/streaming/StreamingContext.scala b/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
index 62c95b573a..3852ac2dab 100644
--- a/streaming/src/main/scala/spark/streaming/StreamingContext.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
@@ -15,21 +15,21 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
import akka.actor.Props
import akka.actor.SupervisorStrategy
import akka.zeromq.Subscribe
-import spark.streaming.dstream._
+import org.apache.spark.streaming.dstream._
-import spark._
-import spark.streaming.receivers.ActorReceiver
-import spark.streaming.receivers.ReceiverSupervisorStrategy
-import spark.streaming.receivers.ZeroMQReceiver
-import spark.storage.StorageLevel
-import spark.util.MetadataCleaner
-import spark.streaming.receivers.ActorReceiver
+import org.apache.spark._
+import org.apache.spark.streaming.receivers.ActorReceiver
+import org.apache.spark.streaming.receivers.ReceiverSupervisorStrategy
+import org.apache.spark.streaming.receivers.ZeroMQReceiver
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.MetadataCleaner
+import org.apache.spark.streaming.receivers.ActorReceiver
import scala.collection.mutable.Queue
import scala.collection.Map
diff --git a/streaming/src/main/scala/spark/streaming/Time.scala b/streaming/src/main/scala/org/apache/spark/streaming/Time.scala
index ad5eab9dd2..2678334f53 100644
--- a/streaming/src/main/scala/spark/streaming/Time.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/Time.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
/**
* This is a simple class that represents an absolute instant of time.
diff --git a/streaming/src/main/scala/spark/streaming/api/java/JavaDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStream.scala
index 7dcb1d713d..f8c8d8ece1 100644
--- a/streaming/src/main/scala/spark/streaming/api/java/JavaDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStream.scala
@@ -15,17 +15,17 @@
* limitations under the License.
*/
-package spark.streaming.api.java
+package org.apache.spark.streaming.api.java
-import spark.streaming.{Duration, Time, DStream}
-import spark.api.java.function.{Function => JFunction}
-import spark.api.java.JavaRDD
-import spark.storage.StorageLevel
-import spark.RDD
+import org.apache.spark.streaming.{Duration, Time, DStream}
+import org.apache.spark.api.java.function.{Function => JFunction}
+import org.apache.spark.api.java.JavaRDD
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.RDD
/**
* A Discretized Stream (DStream), the basic abstraction in Spark Streaming, is a continuous
- * sequence of RDDs (of the same type) representing a continuous stream of data (see [[spark.RDD]]
+ * sequence of RDDs (of the same type) representing a continuous stream of data (see [[org.apache.spark.RDD]]
* for more details on RDDs). DStreams can either be created from live data (such as data from
* HDFS, Kafka or Flume) or generated by transforming existing DStreams using operations
* such as `map`, `window` and `reduceByKeyAndWindow`. While a Spark Streaming program is running, each
@@ -33,7 +33,7 @@ import spark.RDD
* by a parent DStream.
*
* This class contains the basic operations available on all DStreams, such as `map`, `filter` and
- * `window`. In addition, [[spark.streaming.api.java.JavaPairDStream]] contains operations available
+ * `window`. In addition, [[org.apache.spark.streaming.api.java.JavaPairDStream]] contains operations available
* only on DStreams of key-value pairs, such as `groupByKeyAndWindow` and `join`.
*
* Each DStream internally is characterized by a few basic properties:
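
The scaladoc above describes the core model: a DStream is a sequence of RDDs transformed with operations like `map` and `window`. A minimal Scala sketch against the renamed 0.8-era API; the host and port are placeholders:

    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.streaming.StreamingContext._  // brings in pair-DStream operations

    // One-second batches; "local[2]" leaves a core for the receiver and one for processing.
    val ssc = new StreamingContext("local[2]", "DStreamExample", Seconds(1))
    val lines = ssc.socketTextStream("localhost", 9999)   // placeholder endpoint
    val counts = lines.flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _)
    counts.print()
    ssc.start()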
diff --git a/streaming/src/main/scala/spark/streaming/api/java/JavaDStreamLike.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
index 3ab5c1fdde..2e6fe9a9c4 100644
--- a/streaming/src/main/scala/spark/streaming/api/java/JavaDStreamLike.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
@@ -15,18 +15,18 @@
* limitations under the License.
*/
-package spark.streaming.api.java
+package org.apache.spark.streaming.api.java
import java.util.{List => JList}
import java.lang.{Long => JLong}
import scala.collection.JavaConversions._
-import spark.streaming._
-import spark.api.java.{JavaPairRDD, JavaRDDLike, JavaRDD}
-import spark.api.java.function.{Function2 => JFunction2, Function => JFunction, _}
+import org.apache.spark.streaming._
+import org.apache.spark.api.java.{JavaPairRDD, JavaRDDLike, JavaRDD}
+import org.apache.spark.api.java.function.{Function2 => JFunction2, Function => JFunction, _}
import java.util
-import spark.RDD
+import org.apache.spark.RDD
import JavaDStream._
trait JavaDStreamLike[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T, R]]
diff --git a/streaming/src/main/scala/spark/streaming/api/java/JavaPairDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala
index ea08fb3826..c203dccd17 100644
--- a/streaming/src/main/scala/spark/streaming/api/java/JavaPairDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaPairDStream.scala
@@ -15,24 +15,24 @@
* limitations under the License.
*/
-package spark.streaming.api.java
+package org.apache.spark.streaming.api.java
import java.util.{List => JList}
import java.lang.{Long => JLong}
import scala.collection.JavaConversions._
-import spark.streaming._
-import spark.streaming.StreamingContext._
-import spark.api.java.function.{Function => JFunction, Function2 => JFunction2}
-import spark.{RDD, Partitioner}
+import org.apache.spark.streaming._
+import org.apache.spark.streaming.StreamingContext._
+import org.apache.spark.api.java.function.{Function => JFunction, Function2 => JFunction2}
+import org.apache.spark.{RDD, Partitioner}
import org.apache.hadoop.mapred.{JobConf, OutputFormat}
import org.apache.hadoop.mapreduce.{OutputFormat => NewOutputFormat}
import org.apache.hadoop.conf.Configuration
-import spark.api.java.{JavaUtils, JavaRDD, JavaPairRDD}
-import spark.storage.StorageLevel
+import org.apache.spark.api.java.{JavaUtils, JavaRDD, JavaPairRDD}
+import org.apache.spark.storage.StorageLevel
import com.google.common.base.Optional
-import spark.RDD
+import org.apache.spark.RDD
class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
implicit val kManifiest: ClassManifest[K],
@@ -114,7 +114,7 @@ class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
/**
* Return a new DStream by applying `groupByKey` on each RDD of `this` DStream.
* Therefore, the values for each key in `this` DStream's RDDs are grouped into a
- * single sequence to generate the RDDs of the new DStream. [[spark.Partitioner]]
+ * single sequence to generate the RDDs of the new DStream. [[org.apache.spark.Partitioner]]
* is used to control the partitioning of each RDD.
*/
def groupByKey(partitioner: Partitioner): JavaPairDStream[K, JList[V]] =
@@ -138,7 +138,7 @@ class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
/**
* Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are
- * merged using the supplied reduce function. [[spark.Partitioner]] is used to control the
+ * merged using the supplied reduce function. [[org.apache.spark.Partitioner]] is used to control the
* partitioning of each RDD.
*/
def reduceByKey(func: JFunction2[V, V, V], partitioner: Partitioner): JavaPairDStream[K, V] = {
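
Both operations take an explicit Partitioner to control how each generated RDD is split. A short sketch on the Scala side, using the HashPartitioner this patch also relocates; `pairs` is an assumed DStream[(String, Int)]:

    import org.apache.spark.HashPartitioner
    import org.apache.spark.streaming.StreamingContext._

    val partitioner = new HashPartitioner(8)            // 8 partitions per generated RDD
    val grouped = pairs.groupByKey(partitioner)         // one sequence of values per key
    val summed  = pairs.reduceByKey(_ + _, partitioner) // values merged pairwise per key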
@@ -147,7 +147,7 @@ class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
/**
* Combine elements of each key in DStream's RDDs using custom function. This is similar to the
- * combineByKey for RDDs. Please refer to combineByKey in [[spark.PairRDDFunctions]] for more
+ * combineByKey for RDDs. Please refer to combineByKey in [[org.apache.spark.PairRDDFunctions]] for more
* information.
*/
def combineByKey[C](createCombiner: JFunction[V, C],
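
As with combineByKey on RDDs, the combined type may differ from the value type. A hedged sketch computing per-key averages, assuming `pairs` is a DStream[(String, Double)]:

    // Accumulate (sum, count) per key, then divide; the combiner type (Double, Long)
    // differs from the value type Double.
    val sumCounts = pairs.combineByKey[(Double, Long)](
      (v: Double) => (v, 1L),
      (acc: (Double, Long), v: Double) => (acc._1 + v, acc._2 + 1L),
      (a: (Double, Long), b: (Double, Long)) => (a._1 + b._1, a._2 + b._2),
      new HashPartitioner(4))
    val averages = sumCounts.mapValues { case (sum, n) => sum / n }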
@@ -445,7 +445,7 @@ class JavaPairDStream[K, V](val dstream: DStream[(K, V)])(
/**
* Create a new "state" DStream where the state for each key is updated by applying
* the given function on the previous state of the key and the new values of the key.
- * [[spark.Partitioner]] is used to control the partitioning of each RDD.
+ * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD.
* @param updateFunc State update function. If `this` function returns None, then
* the corresponding state key-value pair will be eliminated.
* @param partitioner Partitioner for controlling the partitioning of each RDD in the new DStream.
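
The update function sees each key's new values together with its previous state; returning None evicts the key. A sketch of a running count, assuming `pairs` is a DStream[(String, Int)] and the context has a checkpoint directory set (stateful streams require one):

    // Fold the batch's new values into the previous count; Some(...) keeps the key alive.
    val updateFunc = (newValues: Seq[Int], state: Option[Int]) =>
      Some(state.getOrElse(0) + newValues.sum)
    val runningCounts = pairs.updateStateByKey[Int](updateFunc, new HashPartitioner(4))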
diff --git a/streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
index b7720ad0ea..f10beb1db3 100644
--- a/streaming/src/main/scala/spark/streaming/api/java/JavaStreamingContext.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.streaming.api.java
+package org.apache.spark.streaming.api.java
-import spark.streaming._
+import org.apache.spark.streaming._
import receivers.{ActorReceiver, ReceiverSupervisorStrategy}
-import spark.streaming.dstream._
-import spark.storage.StorageLevel
-import spark.api.java.function.{Function => JFunction, Function2 => JFunction2}
-import spark.api.java.{JavaSparkContext, JavaRDD}
+import org.apache.spark.streaming.dstream._
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.api.java.function.{Function => JFunction, Function2 => JFunction2}
+import org.apache.spark.api.java.{JavaSparkContext, JavaRDD}
import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat}
import twitter4j.Status
import akka.actor.Props
@@ -33,6 +33,7 @@ import java.lang.{Long => JLong, Integer => JInt}
import java.io.InputStream
import java.util.{Map => JMap}
import twitter4j.auth.Authorization
+import org.apache.spark.RDD
/**
* A StreamingContext is the main entry point for Spark Streaming functionality. Besides the basic
@@ -537,7 +538,7 @@ class JavaStreamingContext(val ssc: StreamingContext) {
def queueStream[T](queue: java.util.Queue[JavaRDD[T]]): JavaDStream[T] = {
implicit val cm: ClassManifest[T] =
implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[T]]
- val sQueue = new scala.collection.mutable.Queue[spark.RDD[T]]
+ val sQueue = new scala.collection.mutable.Queue[RDD[T]]
sQueue.enqueue(queue.map(_.rdd).toSeq: _*)
ssc.queueStream(sQueue)
}
@@ -554,7 +555,7 @@ class JavaStreamingContext(val ssc: StreamingContext) {
def queueStream[T](queue: java.util.Queue[JavaRDD[T]], oneAtATime: Boolean): JavaDStream[T] = {
implicit val cm: ClassManifest[T] =
implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[T]]
- val sQueue = new scala.collection.mutable.Queue[spark.RDD[T]]
+ val sQueue = new scala.collection.mutable.Queue[RDD[T]]
sQueue.enqueue(queue.map(_.rdd).toSeq: _*)
ssc.queueStream(sQueue, oneAtATime)
}
@@ -575,7 +576,7 @@ class JavaStreamingContext(val ssc: StreamingContext) {
defaultRDD: JavaRDD[T]): JavaDStream[T] = {
implicit val cm: ClassManifest[T] =
implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[T]]
- val sQueue = new scala.collection.mutable.Queue[spark.RDD[T]]
+ val sQueue = new scala.collection.mutable.Queue[RDD[T]]
sQueue.enqueue(queue.map(_.rdd).toSeq: _*)
ssc.queueStream(sQueue, oneAtATime, defaultRDD.rdd)
}
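
All three overloads now build the internal queue as Queue[RDD[T]] against the freshly imported org.apache.spark.RDD. A Scala-side usage sketch, assuming `ssc` is a live StreamingContext and `sc` its underlying SparkContext:

    import scala.collection.mutable.Queue
    import org.apache.spark.RDD

    // Pre-populate a queue of RDDs; with oneAtATime = true the stream emits one per batch.
    val rddQueue = new Queue[RDD[Int]]
    rddQueue ++= (1 to 3).map(_ => sc.makeRDD(1 to 100, 2))
    val queued = ssc.queueStream(rddQueue, oneAtATime = true)
    queued.count().print()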
diff --git a/streaming/src/main/scala/spark/streaming/dstream/CoGroupedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/CoGroupedDStream.scala
index 99553d295d..4a9d82211f 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/CoGroupedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/CoGroupedDStream.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.{RDD, Partitioner}
-import spark.rdd.CoGroupedRDD
-import spark.streaming.{Time, DStream, Duration}
+import org.apache.spark.{RDD, Partitioner}
+import org.apache.spark.rdd.CoGroupedRDD
+import org.apache.spark.streaming.{Time, DStream, Duration}
private[streaming]
class CoGroupedDStream[K : ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/ConstantInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ConstantInputDStream.scala
index 095137092a..35cc4cb396 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/ConstantInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ConstantInputDStream.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.RDD
-import spark.streaming.{Time, StreamingContext}
+import org.apache.spark.RDD
+import org.apache.spark.streaming.{Time, StreamingContext}
/**
* An input stream that always returns the same RDD on each timestep. Useful for testing.
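
Because the same RDD is re-emitted every batch, this stream gives fully deterministic input for tests. A hedged sketch, assuming the 0.8-era constructor takes the context and the RDD:

    // Every batch interval yields the same ten-element RDD.
    val fixed = sc.makeRDD(1 to 10)
    val constantStream = new ConstantInputDStream(ssc, fixed)
    constantStream.foreach(rdd => println("batch size = " + rdd.count()))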
diff --git a/streaming/src/main/scala/spark/streaming/dstream/FileInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
index de0536125d..1c265ed972 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/FileInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FileInputDStream.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.RDD
-import spark.rdd.UnionRDD
-import spark.streaming.{DStreamCheckpointData, StreamingContext, Time}
+import org.apache.spark.RDD
+import org.apache.spark.rdd.UnionRDD
+import org.apache.spark.streaming.{DStreamCheckpointData, StreamingContext, Time}
import org.apache.hadoop.fs.{FileSystem, Path, PathFilter}
import org.apache.hadoop.conf.Configuration
diff --git a/streaming/src/main/scala/spark/streaming/dstream/FilteredDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FilteredDStream.scala
index 9d8c5c3175..3166c68760 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/FilteredDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FilteredDStream.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.RDD
private[streaming]
class FilteredDStream[T: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/FlatMapValuedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMapValuedDStream.scala
index 78d7117f0f..21950ad6ac 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/FlatMapValuedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMapValuedDStream.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
-import spark.SparkContext._
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.RDD
+import org.apache.spark.SparkContext._
private[streaming]
class FlatMapValuedDStream[K: ClassManifest, V: ClassManifest, U: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/FlatMappedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMappedDStream.scala
index d13bebb10f..8377cfe60c 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/FlatMappedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlatMappedDStream.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.RDD
private[streaming]
class FlatMappedDStream[T: ClassManifest, U: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/FlumeInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlumeInputDStream.scala
index 4906f503c2..3fb443143c 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/FlumeInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlumeInputDStream.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.streaming.StreamingContext
+import org.apache.spark.streaming.StreamingContext
-import spark.Utils
-import spark.storage.StorageLevel
+import org.apache.spark.Utils
+import org.apache.spark.storage.StorageLevel
import org.apache.flume.source.avro.AvroSourceProtocol
import org.apache.flume.source.avro.AvroFlumeEvent
diff --git a/streaming/src/main/scala/spark/streaming/dstream/ForEachDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ForEachDStream.scala
index 7df537eb56..c1f95650c8 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/ForEachDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ForEachDStream.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.RDD
-import spark.streaming.{Duration, DStream, Job, Time}
+import org.apache.spark.RDD
+import org.apache.spark.streaming.{Duration, DStream, Job, Time}
private[streaming]
class ForEachDStream[T: ClassManifest] (
diff --git a/streaming/src/main/scala/spark/streaming/dstream/GlommedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/GlommedDStream.scala
index 06fda6fe8e..1e4c7e7fde 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/GlommedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/GlommedDStream.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.RDD
private[streaming]
class GlommedDStream[T: ClassManifest](parent: DStream[T])
diff --git a/streaming/src/main/scala/spark/streaming/dstream/InputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/InputDStream.scala
index 4dbdec459d..674b27118c 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/InputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/InputDStream.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.streaming.{Time, Duration, StreamingContext, DStream}
+import org.apache.spark.streaming.{Time, Duration, StreamingContext, DStream}
/**
* This is the abstract base class for all input streams. This class provides two methods
diff --git a/streaming/src/main/scala/spark/streaming/dstream/KafkaInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala
index 6ee588af15..51e913675d 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/KafkaInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.Logging
-import spark.storage.StorageLevel
-import spark.streaming.{Time, DStreamCheckpointData, StreamingContext}
+import org.apache.spark.Logging
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.{Time, DStreamCheckpointData, StreamingContext}
import java.util.Properties
import java.util.concurrent.Executors
diff --git a/streaming/src/main/scala/spark/streaming/dstream/MapPartitionedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapPartitionedDStream.scala
index af41a1b9ac..1d79d707bb 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/MapPartitionedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapPartitionedDStream.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.RDD
private[streaming]
class MapPartitionedDStream[T: ClassManifest, U: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/MapValuedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapValuedDStream.scala
index 8d8a6161c6..312e0c0567 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/MapValuedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MapValuedDStream.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
-import spark.SparkContext._
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.RDD
+import org.apache.spark.SparkContext._
private[streaming]
class MapValuedDStream[K: ClassManifest, V: ClassManifest, U: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/MappedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MappedDStream.scala
index 3fda84a38a..af688dde5f 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/MappedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/MappedDStream.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.RDD
private[streaming]
class MappedDStream[T: ClassManifest, U: ClassManifest] (
diff --git a/streaming/src/main/scala/spark/streaming/dstream/NetworkInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/NetworkInputDStream.scala
index 1db0a69a2f..3d68da36a2 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/NetworkInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/NetworkInputDStream.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.streaming.{Time, StreamingContext, AddBlocks, RegisterReceiver, DeregisterReceiver}
+import org.apache.spark.streaming.{Time, StreamingContext, AddBlocks, RegisterReceiver, DeregisterReceiver}
-import spark.{Logging, SparkEnv, RDD}
-import spark.rdd.BlockRDD
-import spark.storage.StorageLevel
+import org.apache.spark.{Logging, SparkEnv, RDD}
+import org.apache.spark.rdd.BlockRDD
+import org.apache.spark.storage.StorageLevel
import scala.collection.mutable.ArrayBuffer
@@ -31,14 +31,14 @@ import akka.actor.{Props, Actor}
import akka.pattern.ask
import akka.dispatch.Await
import akka.util.duration._
-import spark.streaming.util.{RecurringTimer, SystemClock}
+import org.apache.spark.streaming.util.{RecurringTimer, SystemClock}
import java.util.concurrent.ArrayBlockingQueue
/**
* Abstract class for defining any InputDStream that has to start a receiver on worker
* nodes to receive external data. Specific implementations of NetworkInputDStream must
* define the getReceiver() function that gets the receiver object of type
- * [[spark.streaming.dstream.NetworkReceiver]] that will be sent to the workers to receive
+ * [[org.apache.spark.streaming.dstream.NetworkReceiver]] that will be sent to the workers to receive
* data.
* @param ssc_ Streaming context that will execute this input stream
* @tparam T Class type of the object of this stream
@@ -83,7 +83,7 @@ private[streaming] case class ReportError(msg: String) extends NetworkReceiverMe
/**
* Abstract class of a receiver that can be run on worker nodes to receive external data. See
- * [[spark.streaming.dstream.NetworkInputDStream]] for an explanation.
+ * [[org.apache.spark.streaming.dstream.NetworkInputDStream]] for an explanation.
*/
abstract class NetworkReceiver[T: ClassManifest]() extends Serializable with Logging {
@@ -202,7 +202,7 @@ abstract class NetworkReceiver[T: ClassManifest]() extends Serializable with Log
}
/**
- * Batches objects created by a [[spark.streaming.dstream.NetworkReceiver]] and puts them into
+ * Batches objects created by a [[org.apache.spark.streaming.dstream.NetworkReceiver]] and puts them into
* appropriately named blocks at regular intervals. This class starts two threads,
* one to periodically start a new batch and prepare the previous batch as a block,
* the other to push the blocks into the block manager.
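
The comments above spell out the receiver contract: getReceiver() returns a NetworkReceiver that is serialized to a worker, and the receiver's output is batched into named blocks for the block manager. A skeleton of a custom receiver; the onStart()/onStop() hooks are assumed from the 0.8-era API and are not shown in this hunk:

    // Runs on a worker; the framework calls onStart() after shipping the receiver there.
    class MyReceiver(host: String, port: Int) extends NetworkReceiver[String] {
      protected def onStart() {
        // open the connection and hand received objects to the block-batching machinery
      }
      protected def onStop() {
        // release the connection
      }
    }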
diff --git a/streaming/src/main/scala/spark/streaming/dstream/PluggableInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/PluggableInputDStream.scala
index 33f7cd063f..15782f5c11 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/PluggableInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/PluggableInputDStream.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.streaming.StreamingContext
+import org.apache.spark.streaming.StreamingContext
private[streaming]
class PluggableInputDStream[T: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/QueueInputDStream.scala
index b269061b73..b43ecaeebe 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/QueueInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/QueueInputDStream.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.RDD
-import spark.rdd.UnionRDD
+import org.apache.spark.RDD
+import org.apache.spark.rdd.UnionRDD
import scala.collection.mutable.Queue
import scala.collection.mutable.ArrayBuffer
-import spark.streaming.{Time, StreamingContext}
+import org.apache.spark.streaming.{Time, StreamingContext}
private[streaming]
class QueueInputDStream[T: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/RawInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/RawInputDStream.scala
index 236f74f575..c91f12ecd7 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/RawInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/RawInputDStream.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.Logging
-import spark.storage.StorageLevel
-import spark.streaming.StreamingContext
+import org.apache.spark.Logging
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.StreamingContext
import java.net.InetSocketAddress
import java.nio.ByteBuffer
diff --git a/streaming/src/main/scala/spark/streaming/dstream/ReducedWindowedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ReducedWindowedDStream.scala
index 96260501ab..b6c672f899 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/ReducedWindowedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ReducedWindowedDStream.scala
@@ -15,18 +15,18 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.StreamingContext._
-import spark.RDD
-import spark.rdd.{CoGroupedRDD, MapPartitionsRDD}
-import spark.Partitioner
-import spark.SparkContext._
-import spark.storage.StorageLevel
+import org.apache.spark.RDD
+import org.apache.spark.rdd.{CoGroupedRDD, MapPartitionsRDD}
+import org.apache.spark.Partitioner
+import org.apache.spark.SparkContext._
+import org.apache.spark.storage.StorageLevel
import scala.collection.mutable.ArrayBuffer
-import spark.streaming.{Duration, Interval, Time, DStream}
+import org.apache.spark.streaming.{Duration, Interval, Time, DStream}
private[streaming]
class ReducedWindowedDStream[K: ClassManifest, V: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/ShuffledDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ShuffledDStream.scala
index 83b57b27f7..3a0bd2acd7 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/ShuffledDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/ShuffledDStream.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.{RDD, Partitioner}
-import spark.SparkContext._
-import spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.{RDD, Partitioner}
+import org.apache.spark.SparkContext._
+import org.apache.spark.streaming.{Duration, DStream, Time}
private[streaming]
class ShuffledDStream[K: ClassManifest, V: ClassManifest, C: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/SocketInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/SocketInputDStream.scala
index 5877b10e0e..e2539c7396 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/SocketInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/SocketInputDStream.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.streaming.StreamingContext
-import spark.storage.StorageLevel
-import spark.util.NextIterator
+import org.apache.spark.streaming.StreamingContext
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.util.NextIterator
import java.io._
import java.net.Socket
diff --git a/streaming/src/main/scala/spark/streaming/dstream/StateDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/StateDStream.scala
index 4b46613d5e..c1c9f808f0 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/StateDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/StateDStream.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.RDD
-import spark.Partitioner
-import spark.SparkContext._
-import spark.storage.StorageLevel
-import spark.streaming.{Duration, Time, DStream}
+import org.apache.spark.RDD
+import org.apache.spark.Partitioner
+import org.apache.spark.SparkContext._
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.{Duration, Time, DStream}
private[streaming]
class StateDStream[K: ClassManifest, V: ClassManifest, S: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/dstream/TransformedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/TransformedDStream.scala
index e7fbc5bbcf..edba2032b4 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/TransformedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/TransformedDStream.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.RDD
-import spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.RDD
+import org.apache.spark.streaming.{Duration, DStream, Time}
private[streaming]
class TransformedDStream[T: ClassManifest, U: ClassManifest] (
diff --git a/streaming/src/main/scala/spark/streaming/dstream/TwitterInputDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/TwitterInputDStream.scala
index f09a8b9f90..387e15b0e6 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/TwitterInputDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/TwitterInputDStream.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark._
-import spark.streaming._
+import org.apache.spark._
+import org.apache.spark.streaming._
import storage.StorageLevel
import twitter4j._
import twitter4j.auth.Authorization
diff --git a/streaming/src/main/scala/spark/streaming/dstream/UnionDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/UnionDStream.scala
index 3eaa9a7e7f..97eab97b2f 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/UnionDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/UnionDStream.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.streaming.{Duration, DStream, Time}
-import spark.RDD
+import org.apache.spark.streaming.{Duration, DStream, Time}
+import org.apache.spark.RDD
import collection.mutable.ArrayBuffer
-import spark.rdd.UnionRDD
+import org.apache.spark.rdd.UnionRDD
private[streaming]
class UnionDStream[T: ClassManifest](parents: Array[DStream[T]])
diff --git a/streaming/src/main/scala/spark/streaming/dstream/WindowedDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/WindowedDStream.scala
index fd24d61730..dbbea39e81 100644
--- a/streaming/src/main/scala/spark/streaming/dstream/WindowedDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/WindowedDStream.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.streaming.dstream
+package org.apache.spark.streaming.dstream
-import spark.RDD
-import spark.rdd.UnionRDD
-import spark.storage.StorageLevel
-import spark.streaming.{Duration, Interval, Time, DStream}
+import org.apache.spark.RDD
+import org.apache.spark.rdd.UnionRDD
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.{Duration, Interval, Time, DStream}
private[streaming]
class WindowedDStream[T: ClassManifest](
diff --git a/streaming/src/main/scala/spark/streaming/receivers/ActorReceiver.scala b/streaming/src/main/scala/org/apache/spark/streaming/receivers/ActorReceiver.scala
index abeeff11b9..4b5d8c467e 100644
--- a/streaming/src/main/scala/spark/streaming/receivers/ActorReceiver.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/receivers/ActorReceiver.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.streaming.receivers
+package org.apache.spark.streaming.receivers
import akka.actor.{ Actor, PoisonPill, Props, SupervisorStrategy }
import akka.actor.{ actorRef2Scala, ActorRef }
import akka.actor.{ PossiblyHarmful, OneForOneStrategy }
-import spark.storage.StorageLevel
-import spark.streaming.dstream.NetworkReceiver
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.dstream.NetworkReceiver
import java.util.concurrent.atomic.AtomicInteger
diff --git a/streaming/src/main/scala/spark/streaming/receivers/ZeroMQReceiver.scala b/streaming/src/main/scala/org/apache/spark/streaming/receivers/ZeroMQReceiver.scala
index 22d554e7e4..043bb8c8bf 100644
--- a/streaming/src/main/scala/spark/streaming/receivers/ZeroMQReceiver.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/receivers/ZeroMQReceiver.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.streaming.receivers
+package org.apache.spark.streaming.receivers
import akka.actor.Actor
import akka.zeromq._
-import spark.Logging
+import org.apache.spark.Logging
/**
* A receiver that subscribes to a ZeroMQ stream.
diff --git a/streaming/src/main/scala/spark/streaming/util/Clock.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/Clock.scala
index d9ac722df5..f67bb2f6ac 100644
--- a/streaming/src/main/scala/spark/streaming/util/Clock.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/Clock.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.streaming.util
+package org.apache.spark.streaming.util
private[streaming]
trait Clock {
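
This trait is the seam that lets tests substitute ManualClock for SystemClock through the spark.streaming.clock property seen earlier. Its members are outside this hunk; a hypothetical sketch of its shape:

    // Hypothetical sketch only; the real members are not shown in this diff.
    private[streaming] trait Clock {
      def currentTime(): Long                   // "now" in milliseconds
      def waitTillTime(targetTime: Long): Long  // block until the clock reaches targetTime
    }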
diff --git a/streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/MasterFailureTest.scala
index 8ce5d8daf5..50d72298e4 100644
--- a/streaming/src/main/scala/spark/streaming/util/MasterFailureTest.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/MasterFailureTest.scala
@@ -15,11 +15,11 @@
* limitations under the License.
*/
-package spark.streaming.util
+package org.apache.spark.streaming.util
-import spark.{Logging, RDD}
-import spark.streaming._
-import spark.streaming.dstream.ForEachDStream
+import org.apache.spark.{Logging, RDD}
+import org.apache.spark.streaming._
+import org.apache.spark.streaming.dstream.ForEachDStream
import StreamingContext._
import scala.util.Random
diff --git a/streaming/src/main/scala/spark/streaming/util/RawTextHelper.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextHelper.scala
index bf04120293..4e6ce6eabd 100644
--- a/streaming/src/main/scala/spark/streaming/util/RawTextHelper.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextHelper.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming.util
+package org.apache.spark.streaming.util
-import spark.SparkContext
-import spark.SparkContext._
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap}
import scala.collection.JavaConversions.mapAsScalaMap
diff --git a/streaming/src/main/scala/spark/streaming/util/RawTextSender.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala
index 5cc6ad9dee..249f6a22ae 100644
--- a/streaming/src/main/scala/spark/streaming/util/RawTextSender.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/RawTextSender.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.streaming.util
+package org.apache.spark.streaming.util
import java.nio.ByteBuffer
-import spark.util.{RateLimitedOutputStream, IntParam}
+import org.apache.spark.util.{RateLimitedOutputStream, IntParam}
import java.net.ServerSocket
-import spark.{Logging, KryoSerializer}
+import org.apache.spark.{Logging, KryoSerializer}
import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream
import scala.io.Source
import java.io.IOException
diff --git a/streaming/src/main/scala/spark/streaming/util/RecurringTimer.scala b/streaming/src/main/scala/org/apache/spark/streaming/util/RecurringTimer.scala
index 7ecc44236d..d644240405 100644
--- a/streaming/src/main/scala/spark/streaming/util/RecurringTimer.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/util/RecurringTimer.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.streaming.util
+package org.apache.spark.streaming.util
private[streaming]
class RecurringTimer(val clock: Clock, val period: Long, val callback: (Long) => Unit) {
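
The constructor above matches the Scheduler's usage earlier in this patch: a clock, a period in milliseconds, and a callback receiving the firing time. A usage sketch; start() is assumed from the 0.8-era API and is not shown here:

    // Fire once per second on the wall clock, mirroring how the Scheduler drives jobs.
    val timer = new RecurringTimer(new SystemClock(), 1000L,
      longTime => println("tick at " + longTime))
    timer.start()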
diff --git a/streaming/src/test/java/spark/streaming/JavaAPISuite.java b/streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java
index 3b93790baa..c0d729ff87 100644
--- a/streaming/src/test/java/spark/streaming/JavaAPISuite.java
+++ b/streaming/src/test/java/org/apache/spark/streaming/JavaAPISuite.java
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.streaming;
+package org.apache.spark.streaming;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
@@ -28,20 +28,20 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import scala.Tuple2;
-import spark.HashPartitioner;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaRDD;
-import spark.api.java.JavaRDDLike;
-import spark.api.java.JavaPairRDD;
-import spark.api.java.JavaSparkContext;
-import spark.api.java.function.*;
-import spark.storage.StorageLevel;
-import spark.streaming.api.java.JavaDStream;
-import spark.streaming.api.java.JavaPairDStream;
-import spark.streaming.api.java.JavaStreamingContext;
-import spark.streaming.JavaTestUtils;
-import spark.streaming.JavaCheckpointTestUtils;
-import spark.streaming.InputStreamsSuite;
+import org.apache.spark.HashPartitioner;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaRDDLike;
+import org.apache.spark.api.java.JavaPairRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.*;
+import org.apache.spark.storage.StorageLevel;
+import org.apache.spark.streaming.api.java.JavaDStream;
+import org.apache.spark.streaming.api.java.JavaPairDStream;
+import org.apache.spark.streaming.api.java.JavaStreamingContext;
+import org.apache.spark.streaming.JavaTestUtils;
+import org.apache.spark.streaming.JavaCheckpointTestUtils;
+import org.apache.spark.streaming.InputStreamsSuite;
import java.io.*;
import java.util.*;
@@ -59,7 +59,7 @@ public class JavaAPISuite implements Serializable {
@Before
public void setUp() {
- System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock");
+ System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock");
ssc = new JavaStreamingContext("local[2]", "test", new Duration(1000));
ssc.checkpoint("checkpoint");
}
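
The Scheduler resolves this property reflectively, so tests must both use the new fully-qualified name and set it before constructing the context:

    // Order matters: the old "spark.streaming.util.ManualClock" string would now
    // fail reflective lookup, and setting the property after construction is too late.
    System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
    val ssc = new StreamingContext("local[2]", "test", Seconds(1))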
diff --git a/streaming/src/test/java/spark/streaming/JavaTestUtils.scala b/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
index f9d25db8da..8a6604904d 100644
--- a/streaming/src/test/java/spark/streaming/JavaTestUtils.scala
+++ b/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
@@ -15,20 +15,21 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
import collection.mutable.{SynchronizedBuffer, ArrayBuffer}
import java.util.{List => JList}
-import spark.streaming.api.java.{JavaPairDStream, JavaDStreamLike, JavaDStream, JavaStreamingContext}
-import spark.streaming._
+import org.apache.spark.streaming.api.java.{JavaPairDStream, JavaDStreamLike, JavaDStream, JavaStreamingContext}
+import org.apache.spark.streaming._
import java.util.ArrayList
import collection.JavaConversions._
+import org.apache.spark.api.java.JavaRDDLike
/** Exposes streaming test functionality in a Java-friendly way. */
trait JavaTestBase extends TestSuiteBase {
/**
- * Create a [[spark.streaming.TestInputStream]] and attach it to the supplied context.
+ * Create a [[org.apache.spark.streaming.TestInputStream]] and attach it to the supplied context.
* The stream will be derived from the supplied lists of Java objects.
**/
def attachTestInputStream[T](
@@ -46,11 +47,11 @@ trait JavaTestBase extends TestSuiteBase {
/**
* Attach a provided stream to its associated StreamingContext as a
- * [[spark.streaming.TestOutputStream]].
+ * [[org.apache.spark.streaming.TestOutputStream]].
**/
- def attachTestOutputStream[T, This <: spark.streaming.api.java.JavaDStreamLike[T, This, R],
- R <: spark.api.java.JavaRDDLike[T, R]](
- dstream: JavaDStreamLike[T, This, R]) = {
+ def attachTestOutputStream[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T, R]](
+ dstream: JavaDStreamLike[T, This, R]) =
+ {
implicit val cm: ClassManifest[T] =
implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[T]]
val ostream = new TestOutputStream(dstream.dstream,
diff --git a/streaming/src/test/scala/spark/streaming/BasicOperationsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
index 67e3e0cd30..11586f72b6 100644
--- a/streaming/src/test/scala/spark/streaming/BasicOperationsSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.StreamingContext._
import scala.runtime.RichInt
import util.ManualClock
@@ -26,7 +26,7 @@ class BasicOperationsSuite extends TestSuiteBase {
override def framework() = "BasicOperationsSuite"
before {
- System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock")
+ System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
}
after {
diff --git a/streaming/src/test/scala/spark/streaming/CheckpointSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala
index 8c639648f0..a327de80b3 100644
--- a/streaming/src/test/scala/spark/streaming/CheckpointSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/CheckpointSuite.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
import dstream.FileInputDStream
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.StreamingContext._
import java.io.File
import runtime.RichInt
import org.scalatest.BeforeAndAfter
@@ -36,7 +36,7 @@ import com.google.common.io.Files
*/
class CheckpointSuite extends TestSuiteBase with BeforeAndAfter {
- System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock")
+ System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
before {
FileUtils.deleteDirectory(new File(checkpointDir))
@@ -63,7 +63,7 @@ class CheckpointSuite extends TestSuiteBase with BeforeAndAfter {
assert(batchDuration === Milliseconds(500), "batchDuration for this test must be 500 milliseconds")
- System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock")
+ System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
val stateStreamCheckpointInterval = Seconds(1)
diff --git a/streaming/src/test/scala/spark/streaming/FailureSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala
index 7fc649fe27..6337c5359c 100644
--- a/streaming/src/test/scala/spark/streaming/FailureSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/FailureSuite.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
-import spark.Logging
-import spark.streaming.util.MasterFailureTest
+import org.apache.spark.Logging
+import org.apache.spark.streaming.util.MasterFailureTest
import StreamingContext._
import org.scalatest.{FunSuite, BeforeAndAfter}
diff --git a/streaming/src/test/scala/spark/streaming/InputStreamsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
index 1c5419b16d..42e3e51e3f 100644
--- a/streaming/src/test/scala/spark/streaming/InputStreamsSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/InputStreamsSuite.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
import akka.actor.Actor
import akka.actor.IO
@@ -29,9 +29,9 @@ import java.io.{File, BufferedWriter, OutputStreamWriter}
import java.util.concurrent.{TimeUnit, ArrayBlockingQueue}
import collection.mutable.{SynchronizedBuffer, ArrayBuffer}
import util.ManualClock
-import spark.storage.StorageLevel
-import spark.streaming.receivers.Receiver
-import spark.Logging
+import org.apache.spark.storage.StorageLevel
+import org.apache.spark.streaming.receivers.Receiver
+import org.apache.spark.Logging
import scala.util.Random
import org.apache.commons.io.FileUtils
import org.scalatest.BeforeAndAfter
@@ -52,7 +52,7 @@ class InputStreamsSuite extends TestSuiteBase with BeforeAndAfter {
override def checkpointDir = "checkpoint"
before {
- System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock")
+ System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
}
after {
@@ -207,7 +207,7 @@ class InputStreamsSuite extends TestSuiteBase with BeforeAndAfter {
FileUtils.deleteDirectory(testDir)
// Enable manual clock back again for other tests
- System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock")
+ System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
}
diff --git a/streaming/src/test/scala/spark/streaming/TestSuiteBase.scala b/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
index cb34b5a7cc..31c2fa0208 100644
--- a/streaming/src/test/scala/spark/streaming/TestSuiteBase.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
-import spark.streaming.dstream.{InputDStream, ForEachDStream}
-import spark.streaming.util.ManualClock
+import org.apache.spark.streaming.dstream.{InputDStream, ForEachDStream}
+import org.apache.spark.streaming.util.ManualClock
-import spark.{RDD, Logging}
+import org.apache.spark.{RDD, Logging}
import collection.mutable.ArrayBuffer
import collection.mutable.SynchronizedBuffer
diff --git a/streaming/src/test/scala/spark/streaming/WindowOperationsSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/WindowOperationsSuite.scala
index 894b765fc6..f50e05c0d8 100644
--- a/streaming/src/test/scala/spark/streaming/WindowOperationsSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/WindowOperationsSuite.scala
@@ -15,14 +15,14 @@
* limitations under the License.
*/
-package spark.streaming
+package org.apache.spark.streaming
-import spark.streaming.StreamingContext._
+import org.apache.spark.streaming.StreamingContext._
import collection.mutable.ArrayBuffer
class WindowOperationsSuite extends TestSuiteBase {
- System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock")
+ System.setProperty("spark.streaming.clock", "org.apache.spark.streaming.util.ManualClock")
override def framework = "WindowOperationsSuite"
diff --git a/tools/pom.xml b/tools/pom.xml
index 95b5e80e5b..664bf7dcbd 100644
--- a/tools/pom.xml
+++ b/tools/pom.xml
@@ -18,13 +18,13 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-tools</artifactId>
<packaging>jar</packaging>
<name>Spark Project Tools</name>
@@ -32,12 +32,12 @@
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-streaming</artifactId>
<version>${project.version}</version>
</dependency>
diff --git a/tools/src/main/scala/spark/tools/JavaAPICompletenessChecker.scala b/tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala
index f45d0b281c..50335e5736 100644
--- a/tools/src/main/scala/spark/tools/JavaAPICompletenessChecker.scala
+++ b/tools/src/main/scala/org/apache/spark/tools/JavaAPICompletenessChecker.scala
@@ -15,17 +15,17 @@
* limitations under the License.
*/
-package spark.tools
+package org.apache.spark.tools
import java.lang.reflect.Method
import scala.collection.mutable.ArrayBuffer
-import spark._
-import spark.api.java._
-import spark.rdd.OrderedRDDFunctions
-import spark.streaming.{PairDStreamFunctions, DStream, StreamingContext}
-import spark.streaming.api.java.{JavaPairDStream, JavaDStream, JavaStreamingContext}
+import org.apache.spark._
+import org.apache.spark.api.java._
+import org.apache.spark.rdd.OrderedRDDFunctions
+import org.apache.spark.streaming.{PairDStreamFunctions, DStream, StreamingContext}
+import org.apache.spark.streaming.api.java.{JavaPairDStream, JavaDStream, JavaStreamingContext}
private[spark] abstract class SparkType(val name: String)
@@ -129,7 +129,7 @@ object JavaAPICompletenessChecker {
// TODO: the JavaStreamingContext API accepts Array arguments
// instead of Lists, so this isn't a trivial translation / sub:
"scala.collection.Seq" -> "java.util.List",
- "scala.Function2" -> "spark.api.java.function.Function2",
+ "scala.Function2" -> "org.apache.spark.api.java.function.Function2",
"scala.collection.Iterator" -> "java.util.Iterator",
"scala.collection.mutable.Queue" -> "java.util.Queue",
"double" -> "java.lang.Double"
@@ -139,7 +139,7 @@ object JavaAPICompletenessChecker {
scalaType match {
case ParameterizedType(name, parameters, typebounds) =>
name match {
- case "spark.RDD" =>
+ case "org.apache.spark.RDD" =>
if (parameters(0).name == classOf[Tuple2[_, _]].getName) {
val tupleParams =
parameters(0).asInstanceOf[ParameterizedType].parameters.map(applySubs)
@@ -147,13 +147,13 @@ object JavaAPICompletenessChecker {
} else {
ParameterizedType(classOf[JavaRDD[_]].getName, parameters.map(applySubs))
}
- case "spark.streaming.DStream" =>
+ case "org.apache.spark.streaming.DStream" =>
if (parameters(0).name == classOf[Tuple2[_, _]].getName) {
val tupleParams =
parameters(0).asInstanceOf[ParameterizedType].parameters.map(applySubs)
- ParameterizedType("spark.streaming.api.java.JavaPairDStream", tupleParams)
+ ParameterizedType("org.apache.spark.streaming.api.java.JavaPairDStream", tupleParams)
} else {
- ParameterizedType("spark.streaming.api.java.JavaDStream",
+ ParameterizedType("org.apache.spark.streaming.api.java.JavaDStream",
parameters.map(applySubs))
}
case "scala.Option" => {
@@ -167,14 +167,14 @@ object JavaAPICompletenessChecker {
val firstParamName = parameters.last.name
if (firstParamName.startsWith("scala.collection.Traversable") ||
firstParamName.startsWith("scala.collection.Iterator")) {
- ParameterizedType("spark.api.java.function.FlatMapFunction",
+ ParameterizedType("org.apache.spark.api.java.function.FlatMapFunction",
Seq(parameters(0),
parameters.last.asInstanceOf[ParameterizedType].parameters(0)).map(applySubs))
} else if (firstParamName == "scala.runtime.BoxedUnit") {
- ParameterizedType("spark.api.java.function.VoidFunction",
+ ParameterizedType("org.apache.spark.api.java.function.VoidFunction",
parameters.dropRight(1).map(applySubs))
} else {
- ParameterizedType("spark.api.java.function.Function", parameters.map(applySubs))
+ ParameterizedType("org.apache.spark.api.java.function.Function", parameters.map(applySubs))
}
case _ =>
ParameterizedType(renameSubstitutions.getOrElse(name, name),
@@ -211,85 +211,85 @@ object JavaAPICompletenessChecker {
// This list also includes a few methods that are only used by the web UI or other
// internal Spark components.
val excludedNames = Seq(
- "spark.RDD.origin",
- "spark.RDD.elementClassManifest",
- "spark.RDD.checkpointData",
- "spark.RDD.partitioner",
- "spark.RDD.partitions",
- "spark.RDD.firstParent",
- "spark.RDD.doCheckpoint",
- "spark.RDD.markCheckpointed",
- "spark.RDD.clearDependencies",
- "spark.RDD.getDependencies",
- "spark.RDD.getPartitions",
- "spark.RDD.dependencies",
- "spark.RDD.getPreferredLocations",
- "spark.RDD.collectPartitions",
- "spark.RDD.computeOrReadCheckpoint",
- "spark.PairRDDFunctions.getKeyClass",
- "spark.PairRDDFunctions.getValueClass",
- "spark.SparkContext.stringToText",
- "spark.SparkContext.makeRDD",
- "spark.SparkContext.runJob",
- "spark.SparkContext.runApproximateJob",
- "spark.SparkContext.clean",
- "spark.SparkContext.metadataCleaner",
- "spark.SparkContext.ui",
- "spark.SparkContext.newShuffleId",
- "spark.SparkContext.newRddId",
- "spark.SparkContext.cleanup",
- "spark.SparkContext.receiverJobThread",
- "spark.SparkContext.getRDDStorageInfo",
- "spark.SparkContext.addedFiles",
- "spark.SparkContext.addedJars",
- "spark.SparkContext.persistentRdds",
- "spark.SparkContext.executorEnvs",
- "spark.SparkContext.checkpointDir",
- "spark.SparkContext.getSparkHome",
- "spark.SparkContext.executorMemoryRequested",
- "spark.SparkContext.getExecutorStorageStatus",
- "spark.streaming.DStream.generatedRDDs",
- "spark.streaming.DStream.zeroTime",
- "spark.streaming.DStream.rememberDuration",
- "spark.streaming.DStream.storageLevel",
- "spark.streaming.DStream.mustCheckpoint",
- "spark.streaming.DStream.checkpointDuration",
- "spark.streaming.DStream.checkpointData",
- "spark.streaming.DStream.graph",
- "spark.streaming.DStream.isInitialized",
- "spark.streaming.DStream.parentRememberDuration",
- "spark.streaming.DStream.initialize",
- "spark.streaming.DStream.validate",
- "spark.streaming.DStream.setContext",
- "spark.streaming.DStream.setGraph",
- "spark.streaming.DStream.remember",
- "spark.streaming.DStream.getOrCompute",
- "spark.streaming.DStream.generateJob",
- "spark.streaming.DStream.clearOldMetadata",
- "spark.streaming.DStream.addMetadata",
- "spark.streaming.DStream.updateCheckpointData",
- "spark.streaming.DStream.restoreCheckpointData",
- "spark.streaming.DStream.isTimeValid",
- "spark.streaming.StreamingContext.nextNetworkInputStreamId",
- "spark.streaming.StreamingContext.networkInputTracker",
- "spark.streaming.StreamingContext.checkpointDir",
- "spark.streaming.StreamingContext.checkpointDuration",
- "spark.streaming.StreamingContext.receiverJobThread",
- "spark.streaming.StreamingContext.scheduler",
- "spark.streaming.StreamingContext.initialCheckpoint",
- "spark.streaming.StreamingContext.getNewNetworkStreamId",
- "spark.streaming.StreamingContext.validate",
- "spark.streaming.StreamingContext.createNewSparkContext",
- "spark.streaming.StreamingContext.rddToFileName",
- "spark.streaming.StreamingContext.getSparkCheckpointDir",
- "spark.streaming.StreamingContext.env",
- "spark.streaming.StreamingContext.graph",
- "spark.streaming.StreamingContext.isCheckpointPresent"
+ "org.apache.spark.RDD.origin",
+ "org.apache.spark.RDD.elementClassManifest",
+ "org.apache.spark.RDD.checkpointData",
+ "org.apache.spark.RDD.partitioner",
+ "org.apache.spark.RDD.partitions",
+ "org.apache.spark.RDD.firstParent",
+ "org.apache.spark.RDD.doCheckpoint",
+ "org.apache.spark.RDD.markCheckpointed",
+ "org.apache.spark.RDD.clearDependencies",
+ "org.apache.spark.RDD.getDependencies",
+ "org.apache.spark.RDD.getPartitions",
+ "org.apache.spark.RDD.dependencies",
+ "org.apache.spark.RDD.getPreferredLocations",
+ "org.apache.spark.RDD.collectPartitions",
+ "org.apache.spark.RDD.computeOrReadCheckpoint",
+ "org.apache.spark.PairRDDFunctions.getKeyClass",
+ "org.apache.spark.PairRDDFunctions.getValueClass",
+ "org.apache.spark.SparkContext.stringToText",
+ "org.apache.spark.SparkContext.makeRDD",
+ "org.apache.spark.SparkContext.runJob",
+ "org.apache.spark.SparkContext.runApproximateJob",
+ "org.apache.spark.SparkContext.clean",
+ "org.apache.spark.SparkContext.metadataCleaner",
+ "org.apache.spark.SparkContext.ui",
+ "org.apache.spark.SparkContext.newShuffleId",
+ "org.apache.spark.SparkContext.newRddId",
+ "org.apache.spark.SparkContext.cleanup",
+ "org.apache.spark.SparkContext.receiverJobThread",
+ "org.apache.spark.SparkContext.getRDDStorageInfo",
+ "org.apache.spark.SparkContext.addedFiles",
+ "org.apache.spark.SparkContext.addedJars",
+ "org.apache.spark.SparkContext.persistentRdds",
+ "org.apache.spark.SparkContext.executorEnvs",
+ "org.apache.spark.SparkContext.checkpointDir",
+ "org.apache.spark.SparkContext.getSparkHome",
+ "org.apache.spark.SparkContext.executorMemoryRequested",
+ "org.apache.spark.SparkContext.getExecutorStorageStatus",
+ "org.apache.spark.streaming.DStream.generatedRDDs",
+ "org.apache.spark.streaming.DStream.zeroTime",
+ "org.apache.spark.streaming.DStream.rememberDuration",
+ "org.apache.spark.streaming.DStream.storageLevel",
+ "org.apache.spark.streaming.DStream.mustCheckpoint",
+ "org.apache.spark.streaming.DStream.checkpointDuration",
+ "org.apache.spark.streaming.DStream.checkpointData",
+ "org.apache.spark.streaming.DStream.graph",
+ "org.apache.spark.streaming.DStream.isInitialized",
+ "org.apache.spark.streaming.DStream.parentRememberDuration",
+ "org.apache.spark.streaming.DStream.initialize",
+ "org.apache.spark.streaming.DStream.validate",
+ "org.apache.spark.streaming.DStream.setContext",
+ "org.apache.spark.streaming.DStream.setGraph",
+ "org.apache.spark.streaming.DStream.remember",
+ "org.apache.spark.streaming.DStream.getOrCompute",
+ "org.apache.spark.streaming.DStream.generateJob",
+ "org.apache.spark.streaming.DStream.clearOldMetadata",
+ "org.apache.spark.streaming.DStream.addMetadata",
+ "org.apache.spark.streaming.DStream.updateCheckpointData",
+ "org.apache.spark.streaming.DStream.restoreCheckpointData",
+ "org.apache.spark.streaming.DStream.isTimeValid",
+ "org.apache.spark.streaming.StreamingContext.nextNetworkInputStreamId",
+ "org.apache.spark.streaming.StreamingContext.networkInputTracker",
+ "org.apache.spark.streaming.StreamingContext.checkpointDir",
+ "org.apache.spark.streaming.StreamingContext.checkpointDuration",
+ "org.apache.spark.streaming.StreamingContext.receiverJobThread",
+ "org.apache.spark.streaming.StreamingContext.scheduler",
+ "org.apache.spark.streaming.StreamingContext.initialCheckpoint",
+ "org.apache.spark.streaming.StreamingContext.getNewNetworkStreamId",
+ "org.apache.spark.streaming.StreamingContext.validate",
+ "org.apache.spark.streaming.StreamingContext.createNewSparkContext",
+ "org.apache.spark.streaming.StreamingContext.rddToFileName",
+ "org.apache.spark.streaming.StreamingContext.getSparkCheckpointDir",
+ "org.apache.spark.streaming.StreamingContext.env",
+ "org.apache.spark.streaming.StreamingContext.graph",
+ "org.apache.spark.streaming.StreamingContext.isCheckpointPresent"
)
val excludedPatterns = Seq(
- """^spark\.SparkContext\..*To.*Functions""",
- """^spark\.SparkContext\..*WritableConverter""",
- """^spark\.SparkContext\..*To.*Writable"""
+ """^org\.apache\.spark\.SparkContext\..*To.*Functions""",
+ """^org\.apache\.spark\.SparkContext\..*WritableConverter""",
+ """^org\.apache\.spark\.SparkContext\..*To.*Writable"""
).map(_.r)
lazy val excludedByPattern =
!excludedPatterns.map(_.findFirstIn(name)).filter(_.isDefined).isEmpty
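
Taken together, a method name is excluded if it appears verbatim in excludedNames or matches one of excludedPatterns. A condensed sketch of that test (the helper is illustrative, not part of the patch):

    // Sketch: `name` is the fully qualified method name under test,
    // as in the surrounding hunk.
    def isExcluded(name: String,
                   excludedNames: Seq[String],
                   excludedPatterns: Seq[scala.util.matching.Regex]): Boolean =
      excludedNames.contains(name) ||
        excludedPatterns.exists(_.findFirstIn(name).isDefined)

Note that the `exists(_.findFirstIn(name).isDefined)` form is equivalent to the `!...map(...).filter(_.isDefined).isEmpty` chain in the context line above, just more direct.
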
@@ -298,7 +298,7 @@ object JavaAPICompletenessChecker {
private def isExcludedByInterface(method: Method): Boolean = {
val excludedInterfaces =
- Set("spark.Logging", "org.apache.hadoop.mapreduce.HadoopMapReduceUtil")
+ Set("org.apache.spark.Logging", "org.apache.hadoop.mapreduce.HadoopMapReduceUtil")
def toComparisionKey(method: Method) =
(method.getReturnType, method.getName, method.getGenericReturnType)
val interfaces = method.getDeclaringClass.getInterfaces.filter { i =>
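
The hunk above only renames `spark.Logging` in the excluded-interfaces set; for orientation, the check itself boils down to comparing a (return type, name, generic return type) key against methods declared by an excluded interface. A minimal sketch, assuming the same reflection calls as the context lines:

    import java.lang.reflect.Method

    // Sketch only: a method is skipped when an excluded interface
    // declares a method with the same comparison key.
    def isExcludedByInterface(method: Method, excludedInterfaces: Set[String]): Boolean = {
      def key(m: Method) = (m.getReturnType, m.getName, m.getGenericReturnType)
      method.getDeclaringClass.getInterfaces
        .filter(i => excludedInterfaces.contains(i.getName))
        .exists(_.getMethods.exists(m => key(m) == key(method)))
    }
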
diff --git a/yarn/pom.xml b/yarn/pom.xml
index 07dd170eae..a2afbeabff 100644
--- a/yarn/pom.xml
+++ b/yarn/pom.xml
@@ -18,13 +18,13 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-parent</artifactId>
<version>0.8.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-yarn</artifactId>
<packaging>jar</packaging>
<name>Spark Project YARN Support</name>
@@ -81,7 +81,7 @@
<id>hadoop2-yarn</id>
<dependencies>
<dependency>
- <groupId>org.spark-project</groupId>
+ <groupId>org.apache.spark</groupId>
<artifactId>spark-core</artifactId>
<version>${project.version}</version>
</dependency>
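
The groupId change means downstream builds must update their coordinates as well. As a sketch, an sbt project depending on this module would switch to something like the following (artifact name and version taken from this pom):

    // Hypothetical sbt equivalent of the new Maven coordinates:
    libraryDependencies += "org.apache.spark" % "spark-yarn" % "0.8.0-SNAPSHOT"
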
diff --git a/yarn/src/main/scala/spark/deploy/yarn/ApplicationMaster.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
index 0f3b6bc1a6..139a977a03 100644
--- a/yarn/src/main/scala/spark/deploy/yarn/ApplicationMaster.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.yarn
+package org.apache.spark.deploy.yarn
import java.net.Socket
import java.util.concurrent.CopyOnWriteArrayList
@@ -29,7 +29,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.apache.hadoop.yarn.ipc.YarnRPC
import org.apache.hadoop.yarn.util.{ConverterUtils, Records}
import scala.collection.JavaConversions._
-import spark.{SparkContext, Logging, Utils}
+import org.apache.spark.{SparkContext, Logging, Utils}
import org.apache.hadoop.security.UserGroupInformation
import java.security.PrivilegedExceptionAction
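
The two security imports pulled in here are typically combined as below. This is a generic sketch of the standard Hadoop UserGroupInformation idiom, not code from the patch; the user name is illustrative:

    import java.security.PrivilegedExceptionAction
    import org.apache.hadoop.security.UserGroupInformation

    // Generic Hadoop-security idiom (sketch): run an action as a given user.
    val ugi = UserGroupInformation.createRemoteUser("app-user")  // illustrative name
    ugi.doAs(new PrivilegedExceptionAction[Unit] {
      override def run(): Unit = {
        // the application's driver logic would run here as "app-user"
      }
    })
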
diff --git a/yarn/src/main/scala/spark/deploy/yarn/ApplicationMasterArguments.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMasterArguments.scala
index 8de44b1f66..f47e23b63f 100644
--- a/yarn/src/main/scala/spark/deploy/yarn/ApplicationMasterArguments.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMasterArguments.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.deploy.yarn
+package org.apache.spark.deploy.yarn
-import spark.util.IntParam
+import org.apache.spark.util.IntParam
import collection.mutable.ArrayBuffer
class ApplicationMasterArguments(val args: Array[String]) {
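
`IntParam` is an extractor used to pattern-match numeric command-line arguments. Its shape is roughly the following (an assumed sketch mirroring org.apache.spark.util.IntParam; the flag in the usage example is hypothetical):

    // Assumed shape of an IntParam-style extractor:
    object IntParam {
      def unapply(str: String): Option[Int] =
        try Some(str.toInt) catch { case _: NumberFormatException => None }
    }

    // Usage sketch: match flag/value pairs while walking the argument list.
    List("--num-workers", "4") match {
      case "--num-workers" :: IntParam(n) :: _ => println("workers = " + n)
      case _ => ()
    }
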
diff --git a/yarn/src/main/scala/spark/deploy/yarn/Client.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index eb2a8cc642..48e737ed79 100644
--- a/yarn/src/main/scala/spark/deploy/yarn/Client.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.yarn
+package org.apache.spark.deploy.yarn
import java.net.{InetSocketAddress, URI}
import java.nio.ByteBuffer
@@ -33,10 +33,10 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.apache.hadoop.yarn.ipc.YarnRPC
import scala.collection.mutable.HashMap
import scala.collection.JavaConversions._
-import spark.{Logging, Utils}
+import org.apache.spark.{Logging, Utils}
import org.apache.hadoop.yarn.util.{Apps, Records, ConverterUtils}
import org.apache.hadoop.yarn.api.ApplicationConstants.Environment
-import spark.deploy.SparkHadoopUtil
+import org.apache.spark.deploy.SparkHadoopUtil
class Client(conf: Configuration, args: ClientArguments) extends YarnClientImpl with Logging {
diff --git a/yarn/src/main/scala/spark/deploy/yarn/ClientArguments.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
index 67aff03781..6cbfadc23b 100644
--- a/yarn/src/main/scala/spark/deploy/yarn/ClientArguments.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala
@@ -15,12 +15,12 @@
* limitations under the License.
*/
-package spark.deploy.yarn
+package org.apache.spark.deploy.yarn
-import spark.util.MemoryParam
-import spark.util.IntParam
+import org.apache.spark.util.MemoryParam
+import org.apache.spark.util.IntParam
import collection.mutable.{ArrayBuffer, HashMap}
-import spark.scheduler.{InputFormatInfo, SplitInfo}
+import org.apache.spark.scheduler.{InputFormatInfo, SplitInfo}
// TODO: Add code and support for ensuring that yarn resource 'asks' are location aware !
class ClientArguments(val args: Array[String]) {
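
`MemoryParam` is the memory-size counterpart of `IntParam`, matching strings like "512m" or "2g" and yielding megabytes. An assumed sketch of its shape (the real implementation delegates to a Spark utility, so treat this as illustrative):

    // Assumed MemoryParam-style extractor: "512m" -> 512, "2g" -> 2048 (MB).
    object MemoryParam {
      private val Pattern = """(\d+)([mMgG])""".r
      def unapply(str: String): Option[Int] = str match {
        case Pattern(num, unit) => unit.toLowerCase match {
          case "m" => Some(num.toInt)
          case "g" => Some(num.toInt * 1024)
        }
        case _ => None
      }
    }
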
diff --git a/yarn/src/main/scala/spark/deploy/yarn/WorkerRunnable.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/WorkerRunnable.scala
index 0e1fd9b680..72dcf7178e 100644
--- a/yarn/src/main/scala/spark/deploy/yarn/WorkerRunnable.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/WorkerRunnable.scala
@@ -15,7 +15,7 @@
* limitations under the License.
*/
-package spark.deploy.yarn
+package org.apache.spark.deploy.yarn
import java.net.URI
import java.nio.ByteBuffer
@@ -37,7 +37,7 @@ import org.apache.hadoop.yarn.api.ApplicationConstants.Environment
import scala.collection.JavaConversions._
import scala.collection.mutable.HashMap
-import spark.{Logging, Utils}
+import org.apache.spark.{Logging, Utils}
class WorkerRunnable(container: Container, conf: Configuration, masterAddress: String,
slaveId: String, hostname: String, workerMemory: Int, workerCores: Int)
@@ -119,7 +119,7 @@ class WorkerRunnable(container: Container, conf: Configuration, masterAddress: S
// TODO: If the OOM is not recoverable by rescheduling it on different node, then do 'something' to fail job ... akin to blacklisting trackers in mapred ?
" -XX:OnOutOfMemoryError='kill %p' " +
JAVA_OPTS +
- " spark.executor.StandaloneExecutorBackend " +
+ " org.apache.spark.executor.StandaloneExecutorBackend " +
masterAddress + " " +
slaveId + " " +
hostname + " " +
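
Because the executor is launched by its fully qualified main-class name, this string literal has to track the package rename; a stale name would fail at container startup. A condensed sketch of the command assembly (placeholder values stand in for WorkerRunnable's constructor parameters, and JAVA_OPTS is elided):

    // Placeholder values; in WorkerRunnable these come from the constructor.
    val masterAddress = "spark://master:7077"  // illustrative
    val slaveId = "1"
    val hostname = "worker-host"
    val workerCores = 2

    // The renamed main class is spliced into the java launch command.
    val executorMainClass = "org.apache.spark.executor.StandaloneExecutorBackend"
    val command = "java -XX:OnOutOfMemoryError='kill %p' " +
      executorMainClass + " " +
      masterAddress + " " + slaveId + " " + hostname + " " + workerCores
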
diff --git a/yarn/src/main/scala/spark/deploy/yarn/YarnAllocationHandler.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocationHandler.scala
index b0af8baf08..26ff214e12 100644
--- a/yarn/src/main/scala/spark/deploy/yarn/YarnAllocationHandler.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocationHandler.scala
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package spark.deploy.yarn
+package org.apache.spark.deploy.yarn
-import spark.{Logging, Utils}
-import spark.scheduler.SplitInfo
+import org.apache.spark.{Logging, Utils}
+import org.apache.spark.scheduler.SplitInfo
import scala.collection
import org.apache.hadoop.yarn.api.records.{AMResponse, ApplicationAttemptId, ContainerId, Priority, Resource, ResourceRequest, ContainerStatus, Container}
-import spark.scheduler.cluster.{ClusterScheduler, StandaloneSchedulerBackend}
+import org.apache.spark.scheduler.cluster.{ClusterScheduler, StandaloneSchedulerBackend}
import org.apache.hadoop.yarn.api.protocolrecords.{AllocateRequest, AllocateResponse}
import org.apache.hadoop.yarn.util.{RackResolver, Records}
import java.util.concurrent.{CopyOnWriteArrayList, ConcurrentHashMap}
diff --git a/yarn/src/main/scala/spark/deploy/yarn/YarnSparkHadoopUtil.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
index 77c4ee7f3f..ca2f1e2565 100644
--- a/yarn/src/main/scala/spark/deploy/yarn/YarnSparkHadoopUtil.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala
@@ -15,9 +15,9 @@
* limitations under the License.
*/
-package spark.deploy.yarn
+package org.apache.spark.deploy.yarn
-import spark.deploy.SparkHadoopUtil
+import org.apache.spark.deploy.SparkHadoopUtil
import collection.mutable.HashMap
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.security.UserGroupInformation
diff --git a/yarn/src/main/scala/spark/scheduler/cluster/YarnClusterScheduler.scala b/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClusterScheduler.scala
index bb58353e0c..3828ddfc4f 100644
--- a/yarn/src/main/scala/spark/scheduler/cluster/YarnClusterScheduler.scala
+++ b/yarn/src/main/scala/org/apache/spark/scheduler/cluster/YarnClusterScheduler.scala
@@ -15,10 +15,10 @@
* limitations under the License.
*/
-package spark.scheduler.cluster
+package org.apache.spark.scheduler.cluster
-import spark._
-import spark.deploy.yarn.{ApplicationMaster, YarnAllocationHandler}
+import org.apache.spark._
+import org.apache.spark.deploy.yarn.{ApplicationMaster, YarnAllocationHandler}
import org.apache.hadoop.conf.Configuration
/**