/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.scheduler

import org.apache.spark.serializer.SerializerInstance
import java.io.{DataInputStream, DataOutputStream}
import java.nio.ByteBuffer
import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream
import org.apache.spark.util.ByteBufferInputStream
import scala.collection.mutable.HashMap
import org.apache.spark.executor.TaskMetrics

/**
 * A task to execute on a worker node.
 */
private[spark] abstract class Task[T](val stageId: Int) extends Serializable {
  def run(attemptId: Long): T

  def preferredLocations: Seq[TaskLocation] = Nil

  var epoch: Long = -1   // Map output tracker epoch. Will be set by TaskScheduler.

  var metrics: Option[TaskMetrics] = None
}

/**
 * Handles transmission of tasks and their dependencies, because this can be slightly tricky. We
 * need to send the list of JARs and files added to the SparkContext with each task to ensure that
 * worker nodes find out about it, but we can't make it part of the Task because the user's code in
 * the task might depend on one of the JARs. Thus we serialize each task as multiple objects, by
 * first writing out its dependencies.
 */
private[spark] object Task {
  /**
   * Serialize a task and the current app dependencies (files and JARs added to the SparkContext)
   */
  def serializeWithDependencies(
      task: Task[_],
      currentFiles: HashMap[String, Long],
      currentJars: HashMap[String, Long],
      serializer: SerializerInstance)
    : ByteBuffer = {

    val out = new FastByteArrayOutputStream(4096)
    val dataOut = new DataOutputStream(out)

    // Write currentFiles
    dataOut.writeInt(currentFiles.size)
    for ((name, timestamp) <- currentFiles) {
      dataOut.writeUTF(name)
      dataOut.writeLong(timestamp)
    }

    // Write currentJars
    dataOut.writeInt(currentJars.size)
    for ((name, timestamp) <- currentJars) {
      dataOut.writeUTF(name)
      dataOut.writeLong(timestamp)
    }

    // Write the task itself and finish
    dataOut.flush()
    val taskBytes = serializer.serialize(task).array()
    out.write(taskBytes)
    out.trim()
    ByteBuffer.wrap(out.array)
  }

  /**
   * Deserialize the list of dependencies in a task serialized with serializeWithDependencies,
   * and return the task itself as a serialized ByteBuffer. The caller can then update its
   * ClassLoaders and deserialize the task.
   *
   * @return (taskFiles, taskJars, taskBytes)
   */
  def deserializeWithDependencies(serializedTask: ByteBuffer)
    : (HashMap[String, Long], HashMap[String, Long], ByteBuffer) = {

    val in = new ByteBufferInputStream(serializedTask)
    val dataIn = new DataInputStream(in)

    // Read task's files
    val taskFiles = new HashMap[String, Long]()
    val numFiles = dataIn.readInt()
    for (i <- 0 until numFiles) {
      taskFiles(dataIn.readUTF()) = dataIn.readLong()
    }

    // Read task's JARs
    val taskJars = new HashMap[String, Long]()
    val numJars = dataIn.readInt()
    for (i <- 0 until numJars) {
      taskJars(dataIn.readUTF()) = dataIn.readLong()
    }

    // Create a sub-buffer for the rest of the data, which is the serialized Task object
    val subBuffer = serializedTask.slice()  // ByteBufferInputStream will have read just up to task
    (taskFiles, taskJars, subBuffer)
  }
}
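
// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original file): a round trip through
// Task.serializeWithDependencies and Task.deserializeWithDependencies, showing
// that the dependency maps are read back first and the remaining bytes can be
// deserialized into the task. `ExampleTask` and `TaskRoundTripExample` are
// hypothetical names, and the no-argument JavaSerializer constructor is an
// assumption from this codebase's era; treat this as a sketch, not part of the
// scheduler API.
// ---------------------------------------------------------------------------

/** A trivial concrete Task, since Task itself is abstract (hypothetical example class). */
private[spark] class ExampleTask extends Task[Int](0) {
  override def run(attemptId: Long): Int = 42
}

private[spark] object TaskRoundTripExample {
  def main(args: Array[String]) {
    // Assumed: JavaSerializer with a no-argument constructor.
    val serializer = new org.apache.spark.serializer.JavaSerializer().newInstance()

    // Dependencies the driver would normally track via SparkContext.addFile/addJar,
    // keyed by name with a timestamp value.
    val files = HashMap("data.txt" -> 1L)
    val jars = HashMap("lib.jar" -> 2L)

    // Serialize the task together with its dependency lists ...
    val buffer = Task.serializeWithDependencies(new ExampleTask, files, jars, serializer)

    // ... then read the dependency lists back; the remaining bytes are the task itself.
    val (taskFiles, taskJars, taskBytes) = Task.deserializeWithDependencies(buffer)
    val task = serializer.deserialize[ExampleTask](taskBytes)

    println("files=" + taskFiles + " jars=" + taskJars + " result=" + task.run(0L))
  }
}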