diff options
author | Felix Cheung <felixcheung_m@hotmail.com> | 2017-03-19 10:37:15 -0700 |
---|---|---|
committer | Felix Cheung <felixcheung@apache.org> | 2017-03-19 10:37:15 -0700 |
commit | 422aa67d1bb84f913b06e6d94615adb6557e2870 (patch) | |
tree | bf1378ba979f1135dabb24817b9d5e1ca9a4a3fd /core/src/main/scala/org/apache | |
parent | 60262bc951864a7a3874ab3570b723198e99d613 (diff) | |
download | spark-422aa67d1bb84f913b06e6d94615adb6557e2870.tar.gz spark-422aa67d1bb84f913b06e6d94615adb6557e2870.tar.bz2 spark-422aa67d1bb84f913b06e6d94615adb6557e2870.zip |
[SPARK-18817][SPARKR][SQL] change derby log output to temp dir
## What changes were proposed in this pull request?
Passes the R `tempdir()` (this is the R session temp dir, shared with other temp files/dirs) to the JVM and sets the `derby.stream.error.file` system property so that `derby.log` is written under that directory.
## How was this patch tested?
Tested manually and with unit tests.
With this change, these files are relocated under `/tmp`:
```
# ls /tmp/RtmpG2M0cB/
derby.log
```
And they are removed automatically when the R session is ended.
Author: Felix Cheung <felixcheung_m@hotmail.com>
Closes #16330 from felixcheung/rderby.
Diffstat (limited to 'core/src/main/scala/org/apache')
-rw-r--r-- | core/src/main/scala/org/apache/spark/api/r/RRDD.scala | 9 |
1 file changed, 9 insertions, 0 deletions
diff --git a/core/src/main/scala/org/apache/spark/api/r/RRDD.scala b/core/src/main/scala/org/apache/spark/api/r/RRDD.scala index a1a5eb8cf5..72ae0340aa 100644 --- a/core/src/main/scala/org/apache/spark/api/r/RRDD.scala +++ b/core/src/main/scala/org/apache/spark/api/r/RRDD.scala @@ -17,6 +17,7 @@ package org.apache.spark.api.r +import java.io.File import java.util.{Map => JMap} import scala.collection.JavaConverters._ @@ -127,6 +128,14 @@ private[r] object RRDD { sparkConf.setExecutorEnv(name.toString, value.toString) } + if (sparkEnvirMap.containsKey("spark.r.sql.derby.temp.dir") && + System.getProperty("derby.stream.error.file") == null) { + // This must be set before SparkContext is instantiated. + System.setProperty("derby.stream.error.file", + Seq(sparkEnvirMap.get("spark.r.sql.derby.temp.dir").toString, "derby.log") + .mkString(File.separator)) + } + val jsc = new JavaSparkContext(sparkConf) jars.foreach { jar => jsc.addJar(jar) |