Skip to content

Commit 6cd7058

Browse files
committed
Revert "[SPARK-6122][Core] Upgrade Tachyon client version to 0.6.1."
This reverts commit a41b9c6.
1 parent 474d132 commit 6cd7058

File tree

4 files changed

+17
-18
lines changed

4 files changed

+17
-18
lines changed

core/pom.xml

+1-1
Original file line numberDiff line numberDiff line change
@@ -275,7 +275,7 @@
275275
<dependency>
276276
<groupId>org.tachyonproject</groupId>
277277
<artifactId>tachyon-client</artifactId>
278-
<version>0.6.1</version>
278+
<version>0.5.0</version>
279279
<exclusions>
280280
<exclusion>
281281
<groupId>org.apache.hadoop</groupId>

core/src/main/scala/org/apache/spark/storage/TachyonBlockManager.scala

+14-13
Original file line numberDiff line numberDiff line change
@@ -20,8 +20,8 @@ package org.apache.spark.storage
2020
import java.text.SimpleDateFormat
2121
import java.util.{Date, Random}
2222

23-
import tachyon.TachyonURI
24-
import tachyon.client.{TachyonFile, TachyonFS}
23+
import tachyon.client.TachyonFS
24+
import tachyon.client.TachyonFile
2525

2626
import org.apache.spark.Logging
2727
import org.apache.spark.executor.ExecutorExitCode
@@ -40,7 +40,7 @@ private[spark] class TachyonBlockManager(
4040
val master: String)
4141
extends Logging {
4242

43-
val client = if (master != null && master != "") TachyonFS.get(new TachyonURI(master)) else null
43+
val client = if (master != null && master != "") TachyonFS.get(master) else null
4444

4545
if (client == null) {
4646
logError("Failed to connect to the Tachyon as the master address is not configured")
@@ -60,11 +60,11 @@ private[spark] class TachyonBlockManager(
6060
addShutdownHook()
6161

6262
def removeFile(file: TachyonFile): Boolean = {
63-
client.delete(new TachyonURI(file.getPath()), false)
63+
client.delete(file.getPath(), false)
6464
}
6565

6666
def fileExists(file: TachyonFile): Boolean = {
67-
client.exist(new TachyonURI(file.getPath()))
67+
client.exist(file.getPath())
6868
}
6969

7070
def getFile(filename: String): TachyonFile = {
@@ -81,15 +81,15 @@ private[spark] class TachyonBlockManager(
8181
if (old != null) {
8282
old
8383
} else {
84-
val path = new TachyonURI(s"${tachyonDirs(dirId)}/${"%02x".format(subDirId)}")
84+
val path = tachyonDirs(dirId) + "/" + "%02x".format(subDirId)
8585
client.mkdir(path)
8686
val newDir = client.getFile(path)
8787
subDirs(dirId)(subDirId) = newDir
8888
newDir
8989
}
9090
}
9191
}
92-
val filePath = new TachyonURI(s"$subDir/$filename")
92+
val filePath = subDir + "/" + filename
9393
if(!client.exist(filePath)) {
9494
client.createFile(filePath)
9595
}
@@ -101,7 +101,7 @@ private[spark] class TachyonBlockManager(
101101

102102
// TODO: Some of the logic here could be consolidated/de-duplicated with that in the DiskStore.
103103
private def createTachyonDirs(): Array[TachyonFile] = {
104-
logDebug(s"Creating tachyon directories at root dirs '$rootDirs'")
104+
logDebug("Creating tachyon directories at root dirs '" + rootDirs + "'")
105105
val dateFormat = new SimpleDateFormat("yyyyMMddHHmmss")
106106
rootDirs.split(",").map { rootDir =>
107107
var foundLocalDir = false
@@ -113,21 +113,22 @@ private[spark] class TachyonBlockManager(
113113
tries += 1
114114
try {
115115
tachyonDirId = "%s-%04x".format(dateFormat.format(new Date), rand.nextInt(65536))
116-
val path = new TachyonURI(s"$rootDir/spark-tachyon-$tachyonDirId")
116+
val path = rootDir + "/" + "spark-tachyon-" + tachyonDirId
117117
if (!client.exist(path)) {
118118
foundLocalDir = client.mkdir(path)
119119
tachyonDir = client.getFile(path)
120120
}
121121
} catch {
122122
case e: Exception =>
123-
logWarning(s"Attempt $tries to create tachyon dir $tachyonDir failed", e)
123+
logWarning("Attempt " + tries + " to create tachyon dir " + tachyonDir + " failed", e)
124124
}
125125
}
126126
if (!foundLocalDir) {
127-
logError(s"Failed $MAX_DIR_CREATION_ATTEMPTS attempts to create tachyon dir in $rootDir")
127+
logError("Failed " + MAX_DIR_CREATION_ATTEMPTS + " attempts to create tachyon dir in " +
128+
rootDir)
128129
System.exit(ExecutorExitCode.TACHYON_STORE_FAILED_TO_CREATE_DIR)
129130
}
130-
logInfo(s"Created tachyon directory at $tachyonDir")
131+
logInfo("Created tachyon directory at " + tachyonDir)
131132
tachyonDir
132133
}
133134
}
@@ -144,7 +145,7 @@ private[spark] class TachyonBlockManager(
144145
}
145146
} catch {
146147
case e: Exception =>
147-
logError(s"Exception while deleting tachyon spark dir: $tachyonDir", e)
148+
logError("Exception while deleting tachyon spark dir: " + tachyonDir, e)
148149
}
149150
}
150151
client.close()

core/src/main/scala/org/apache/spark/util/Utils.scala

+1-3
Original file line numberDiff line numberDiff line change
@@ -42,8 +42,6 @@ import org.apache.hadoop.security.UserGroupInformation
4242
import org.apache.log4j.PropertyConfigurator
4343
import org.eclipse.jetty.util.MultiException
4444
import org.json4s._
45-
46-
import tachyon.TachyonURI
4745
import tachyon.client.{TachyonFS, TachyonFile}
4846

4947
import org.apache.spark._
@@ -972,7 +970,7 @@ private[spark] object Utils extends Logging {
972970
* Delete a file or directory and its contents recursively.
973971
*/
974972
def deleteRecursively(dir: TachyonFile, client: TachyonFS) {
975-
if (!client.delete(new TachyonURI(dir.getPath()), true)) {
973+
if (!client.delete(dir.getPath(), true)) {
976974
throw new IOException("Failed to delete the tachyon dir: " + dir)
977975
}
978976
}

make-distribution.sh

+1-1
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ SPARK_HOME="$(cd "`dirname "$0"`"; pwd)"
3232
DISTDIR="$SPARK_HOME/dist"
3333

3434
SPARK_TACHYON=false
35-
TACHYON_VERSION="0.6.1"
35+
TACHYON_VERSION="0.5.0"
3636
TACHYON_TGZ="tachyon-${TACHYON_VERSION}-bin.tar.gz"
3737
TACHYON_URL="https://github.com/amplab/tachyon/releases/download/v${TACHYON_VERSION}/${TACHYON_TGZ}"
3838

0 commit comments

Comments (0)