aboutsummaryrefslogtreecommitdiff
path: root/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala
blob: 043945a211f26948d9ad7a3d99b80951acb5b23a (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.deploy.master

import java.io._

import scala.Serializable

import akka.serialization.Serialization
import org.apache.spark.Logging

/**
 * Stores data in a single on-disk directory with one file per application and worker.
 * Files are deleted when applications and workers are removed.
 *
 * Files are named "app_<id>" and "worker_<id>"; recovery reads them back in
 * lexicographic filename order.
 *
 * @param dir Directory to store files. Created if non-existent (but not recursively).
 * @param serialization Used to serialize our objects.
 */
private[spark] class FileSystemPersistenceEngine(
    val dir: String,
    val serialization: Serialization)
  extends PersistenceEngine with Logging {

  // Best-effort creation: returns false if the directory already exists, which is fine.
  new File(dir).mkdir()

  override def addApplication(app: ApplicationInfo) {
    serializeIntoFile(new File(dir, "app_" + app.id), app)
  }

  override def removeApplication(app: ApplicationInfo) {
    new File(dir, "app_" + app.id).delete()
  }

  override def addWorker(worker: WorkerInfo) {
    serializeIntoFile(new File(dir, "worker_" + worker.id), worker)
  }

  override def removeWorker(worker: WorkerInfo) {
    new File(dir, "worker_" + worker.id).delete()
  }

  /**
   * Reads back all persisted applications and workers, sorted by filename.
   *
   * @return a pair of (applications, workers) deserialized from the on-disk files.
   */
  override def readPersistedData(): (Seq[ApplicationInfo], Seq[WorkerInfo]) = {
    val sortedFiles = new File(dir).listFiles().sortBy(_.getName)
    val appFiles = sortedFiles.filter(_.getName.startsWith("app_"))
    val apps = appFiles.map(deserializeFromFile[ApplicationInfo])
    val workerFiles = sortedFiles.filter(_.getName.startsWith("worker_"))
    val workers = workerFiles.map(deserializeFromFile[WorkerInfo])
    (apps, workers)
  }

  /**
   * Serializes `value` with the configured akka serialization and writes the bytes to `file`.
   *
   * @throws IllegalStateException if the file already exists or cannot be created.
   */
  private def serializeIntoFile(file: File, value: AnyRef) {
    val created = file.createNewFile()
    if (!created) { throw new IllegalStateException("Could not create file: " + file) }

    val serializer = serialization.findSerializerFor(value)
    val serialized = serializer.toBinary(value)

    // Close the stream even if write() throws, so a failed write does not leak
    // the file descriptor.
    val out = new FileOutputStream(file)
    try {
      out.write(serialized)
    } finally {
      out.close()
    }
  }

  /**
   * Reads `file` fully into memory and deserializes it as a `T` using the
   * serializer registered for that runtime class.
   */
  def deserializeFromFile[T](file: File)(implicit m: Manifest[T]): T = {
    // Files here are small serialized records, so an Int length is safe in practice.
    val fileData = new Array[Byte](file.length().toInt)
    val dis = new DataInputStream(new FileInputStream(file))
    try {
      dis.readFully(fileData)
    } finally {
      // Close even on a short/failed read to avoid leaking the descriptor.
      dis.close()
    }

    val clazz = m.runtimeClass.asInstanceOf[Class[T]]
    val serializer = serialization.serializerFor(clazz)
    serializer.fromBinary(fileData).asInstanceOf[T]
  }
}