author     Patrick Wendell <patrick@databricks.com>   2015-03-23 15:08:39 -0700
committer  Patrick Wendell <patrick@databricks.com>   2015-03-23 15:08:39 -0700
commit     6cd7058b369ec8d01938961f148734ee9eaf76de (patch)
tree       372cffb039a073787822c69bd101d825ea5189f6 /core/src/main/scala/org/apache
parent     474d1320c9b93c501710ad1cfa836b8284562a2c (diff)
Revert "[SPARK-6122][Core] Upgrade Tachyon client version to 0.6.1."
This reverts commit a41b9c6004cfee84bd56dfa1faf5a0cf084551ae.
Diffstat (limited to 'core/src/main/scala/org/apache')
-rw-r--r--  core/src/main/scala/org/apache/spark/storage/TachyonBlockManager.scala | 27
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala                  |  4
2 files changed, 15 insertions(+), 16 deletions(-)
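
For context, the change being reverted wrapped every path passed to the Tachyon client in a TachyonURI (the 0.6.1 API); this revert restores the plain string-path calls of the older client. Below is a minimal sketch of the restored call style, using only methods that appear in the diff (TachyonFS.get, exist, mkdir, delete, close). The master address and directory name are hypothetical, and it assumes the pre-0.6 Tachyon client jar on the classpath.

import tachyon.client.TachyonFS

object TachyonStringPathSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical master address; replace with a real tachyon://host:port.
    val master = "tachyon://localhost:19998"

    // Pre-0.6 client: the master and all paths are plain strings.
    // (The reverted 0.6.1 code wrapped these in new TachyonURI(...).)
    val client = TachyonFS.get(master)

    val dir = "/spark-tachyon-example"
    if (!client.exist(dir)) {
      client.mkdir(dir)
    }
    client.delete(dir, true) // second argument: delete recursively
    client.close()
  }
}
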
diff --git a/core/src/main/scala/org/apache/spark/storage/TachyonBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/TachyonBlockManager.scala
index 2ab6a8f3ec..af87303421 100644
--- a/core/src/main/scala/org/apache/spark/storage/TachyonBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/TachyonBlockManager.scala
@@ -20,8 +20,8 @@ package org.apache.spark.storage
import java.text.SimpleDateFormat
import java.util.{Date, Random}
-import tachyon.TachyonURI
-import tachyon.client.{TachyonFile, TachyonFS}
+import tachyon.client.TachyonFS
+import tachyon.client.TachyonFile
import org.apache.spark.Logging
import org.apache.spark.executor.ExecutorExitCode
@@ -40,7 +40,7 @@ private[spark] class TachyonBlockManager(
val master: String)
extends Logging {
- val client = if (master != null && master != "") TachyonFS.get(new TachyonURI(master)) else null
+ val client = if (master != null && master != "") TachyonFS.get(master) else null
if (client == null) {
logError("Failed to connect to the Tachyon as the master address is not configured")
@@ -60,11 +60,11 @@ private[spark] class TachyonBlockManager(
addShutdownHook()
def removeFile(file: TachyonFile): Boolean = {
- client.delete(new TachyonURI(file.getPath()), false)
+ client.delete(file.getPath(), false)
}
def fileExists(file: TachyonFile): Boolean = {
- client.exist(new TachyonURI(file.getPath()))
+ client.exist(file.getPath())
}
def getFile(filename: String): TachyonFile = {
@@ -81,7 +81,7 @@ private[spark] class TachyonBlockManager(
if (old != null) {
old
} else {
- val path = new TachyonURI(s"${tachyonDirs(dirId)}/${"%02x".format(subDirId)}")
+ val path = tachyonDirs(dirId) + "/" + "%02x".format(subDirId)
client.mkdir(path)
val newDir = client.getFile(path)
subDirs(dirId)(subDirId) = newDir
@@ -89,7 +89,7 @@ private[spark] class TachyonBlockManager(
}
}
}
- val filePath = new TachyonURI(s"$subDir/$filename")
+ val filePath = subDir + "/" + filename
if(!client.exist(filePath)) {
client.createFile(filePath)
}
@@ -101,7 +101,7 @@ private[spark] class TachyonBlockManager(
// TODO: Some of the logic here could be consolidated/de-duplicated with that in the DiskStore.
private def createTachyonDirs(): Array[TachyonFile] = {
- logDebug(s"Creating tachyon directories at root dirs '$rootDirs'")
+ logDebug("Creating tachyon directories at root dirs '" + rootDirs + "'")
val dateFormat = new SimpleDateFormat("yyyyMMddHHmmss")
rootDirs.split(",").map { rootDir =>
var foundLocalDir = false
@@ -113,21 +113,22 @@ private[spark] class TachyonBlockManager(
tries += 1
try {
tachyonDirId = "%s-%04x".format(dateFormat.format(new Date), rand.nextInt(65536))
- val path = new TachyonURI(s"$rootDir/spark-tachyon-$tachyonDirId")
+ val path = rootDir + "/" + "spark-tachyon-" + tachyonDirId
if (!client.exist(path)) {
foundLocalDir = client.mkdir(path)
tachyonDir = client.getFile(path)
}
} catch {
case e: Exception =>
- logWarning(s"Attempt $tries to create tachyon dir $tachyonDir failed", e)
+ logWarning("Attempt " + tries + " to create tachyon dir " + tachyonDir + " failed", e)
}
}
if (!foundLocalDir) {
- logError(s"Failed $MAX_DIR_CREATION_ATTEMPTS attempts to create tachyon dir in $rootDir")
+ logError("Failed " + MAX_DIR_CREATION_ATTEMPTS + " attempts to create tachyon dir in " +
+ rootDir)
System.exit(ExecutorExitCode.TACHYON_STORE_FAILED_TO_CREATE_DIR)
}
- logInfo(s"Created tachyon directory at $tachyonDir")
+ logInfo("Created tachyon directory at " + tachyonDir)
tachyonDir
}
}
@@ -144,7 +145,7 @@ private[spark] class TachyonBlockManager(
}
} catch {
case e: Exception =>
- logError(s"Exception while deleting tachyon spark dir: $tachyonDir", e)
+ logError("Exception while deleting tachyon spark dir: " + tachyonDir, e)
}
}
client.close()
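
As a side note on the naming scheme in createTachyonDirs and getFile above: the top-level directory name is a timestamp plus a random 16-bit hex suffix, and sub-directories are two-hex-digit buckets. Here is a self-contained sketch of just that string formatting (no Tachyon client required); the root dir and bucket count are illustrative, not the values Spark configures.

import java.text.SimpleDateFormat
import java.util.{Date, Random}

object TachyonDirNamingSketch {
  def main(args: Array[String]): Unit = {
    val rootDir = "/tmp_spark_tachyon" // illustrative root, not Spark's default
    val dateFormat = new SimpleDateFormat("yyyyMMddHHmmss")
    val rand = new Random()

    // Top-level dir, as in createTachyonDirs: timestamp plus a random 16-bit hex suffix.
    val tachyonDirId = "%s-%04x".format(dateFormat.format(new Date), rand.nextInt(65536))
    val tachyonDir = rootDir + "/" + "spark-tachyon-" + tachyonDirId

    // Sub-directory, as in getFile: a two-hex-digit bucket id.
    val subDirId = rand.nextInt(64) // bucket count here is illustrative
    val subDir = tachyonDir + "/" + "%02x".format(subDirId)

    println(subDir) // e.g. /tmp_spark_tachyon/spark-tachyon-20150323150839-1a2b/3f
  }
}
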
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 91d833295e..fa56bb09e4 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -42,8 +42,6 @@ import org.apache.hadoop.security.UserGroupInformation
import org.apache.log4j.PropertyConfigurator
import org.eclipse.jetty.util.MultiException
import org.json4s._
-
-import tachyon.TachyonURI
import tachyon.client.{TachyonFS, TachyonFile}
import org.apache.spark._
@@ -972,7 +970,7 @@ private[spark] object Utils extends Logging {
* Delete a file or directory and its contents recursively.
*/
def deleteRecursively(dir: TachyonFile, client: TachyonFS) {
- if (!client.delete(new TachyonURI(dir.getPath()), true)) {
+ if (!client.delete(dir.getPath(), true)) {
throw new IOException("Failed to delete the tachyon dir: " + dir)
}
}
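
For completeness, a hedged usage sketch of the restored Utils.deleteRecursively(TachyonFile, TachyonFS) overload. The master address and path are hypothetical, and the caller sits under org.apache.spark because Utils is private[spark].

package org.apache.spark.example // Utils is private[spark], so the caller lives under org.apache.spark

import tachyon.client.{TachyonFS, TachyonFile}

import org.apache.spark.util.Utils

object DeleteRecursivelySketch {
  def main(args: Array[String]): Unit = {
    val client: TachyonFS = TachyonFS.get("tachyon://localhost:19998") // hypothetical master
    val dir: TachyonFile = client.getFile("/spark-tachyon-example")    // hypothetical path
    if (dir != null) {
      // With the revert applied, this calls client.delete(dir.getPath(), true)
      // and throws IOException when the delete fails.
      Utils.deleteRecursively(dir, client)
    }
    client.close()
  }
}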