-rw-r--r--  docs/configuration.md                                                                  11
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java                     2
-rw-r--r--  launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java          3
-rw-r--r--  launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java    18
4 files changed, 32 insertions, 2 deletions
diff --git a/docs/configuration.md b/docs/configuration.md
index d0c76aaad0..6e98f67b73 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -1786,6 +1786,14 @@ showDF(properties, numRows = 200, truncate = FALSE)
Executable for executing R scripts in client modes for driver. Ignored in cluster modes.
</td>
</tr>
+<tr>
+ <td><code>spark.r.shell.command</code></td>
+ <td>R</td>
+ <td>
+ Executable for executing the SparkR shell in client modes for the driver. Ignored in cluster modes. It is the same as the environment variable <code>SPARKR_DRIVER_R</code>, but takes precedence over it.
+ <code>spark.r.shell.command</code> is used for the SparkR shell, while <code>spark.r.driver.command</code> is used for running R scripts.
+ </td>
+</tr>
</table>
#### Deploy
@@ -1852,7 +1860,8 @@ The following variables can be set in `spark-env.sh`:
</tr>
<tr>
<td><code>SPARKR_DRIVER_R</code></td>
- <td>R binary executable to use for SparkR shell (default is <code>R</code>).</td>
+ <td>R binary executable to use for SparkR shell (default is <code>R</code>).
+ Property <code>spark.r.shell.command</code> takes precedence if it is set.</td>
</tr>
<tr>
<td><code>SPARK_LOCAL_IP</code></td>
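
For illustration only: the precedence documented above (the spark.r.shell.command property first, then the SPARKR_DRIVER_R environment variable, then plain R) can be exercised with a small standalone sketch. The firstNonEmpty helper below is a hand-rolled stand-in rather than the launcher's own utility, and both paths are hypothetical.

import java.util.HashMap;
import java.util.Map;

public class RShellPrecedenceSketch {

  // Stand-in for the launcher's "first non-empty value wins" lookup.
  static String firstNonEmpty(String... candidates) {
    for (String candidate : candidates) {
      if (candidate != null && !candidate.isEmpty()) {
        return candidate;
      }
    }
    return null;
  }

  static String resolveRShell(Map<String, String> conf, Map<String, String> env) {
    return firstNonEmpty(
        conf.get("spark.r.shell.command"),   // 1. the new property
        env.get("SPARKR_DRIVER_R"),          // 2. the environment variable
        "R");                                // 3. default: R on the PATH
  }

  public static void main(String[] args) {
    Map<String, String> conf = new HashMap<>();
    Map<String, String> env = new HashMap<>();

    env.put("SPARKR_DRIVER_R", "/opt/R/bin/R");       // hypothetical path
    conf.put("spark.r.shell.command", "/usr/bin/R");  // hypothetical path

    // Property set: it wins even though SPARKR_DRIVER_R is also defined.
    System.out.println(resolveRShell(conf, env));     // prints /usr/bin/R

    // Property unset: fall back to the environment variable.
    conf.remove("spark.r.shell.command");
    System.out.println(resolveRShell(conf, env));     // prints /opt/R/bin/R

    // Neither set: plain "R".
    env.remove("SPARKR_DRIVER_R");
    System.out.println(resolveRShell(conf, env));     // prints R
  }
}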
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
index 7b7a7bf57b..ea56214d23 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
@@ -68,6 +68,8 @@ public class SparkLauncher {
static final String PYSPARK_PYTHON = "spark.pyspark.python";
+ static final String SPARKR_R_SHELL = "spark.r.shell.command";
+
/** Logger name to use when launching a child process. */
public static final String CHILD_PROCESS_LOGGER_NAME = "spark.launcher.childProcLoggerName";
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
index f6da644e4c..29c6d82cdb 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
@@ -336,7 +336,8 @@ class SparkSubmitCommandBuilder extends AbstractCommandBuilder {
join(File.separator, sparkHome, "R", "lib", "SparkR", "profile", "shell.R"));
List<String> args = new ArrayList<>();
- args.add(firstNonEmpty(System.getenv("SPARKR_DRIVER_R"), "R"));
+ args.add(firstNonEmpty(conf.get(SparkLauncher.SPARKR_R_SHELL),
+ System.getenv("SPARKR_DRIVER_R"), "R"));
return args;
}
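
For orientation, here is a rough sketch of what the built command amounts to once the R executable has been resolved: the executable ends up as the last element of the command, and the submit options reach SparkR through the SPARKR_SUBMIT_ARGS environment variable, which is what the new test below asserts. This is not the launcher's actual launch path; the executable, the argument string, and the sparkr-shell resource name are taken from the test's expectations and are illustrative only.

import java.util.Arrays;

public class SparkRShellLaunchSketch {
  public static void main(String[] args) throws Exception {
    // Value resolved above: spark.r.shell.command, else SPARKR_DRIVER_R, else "R".
    String rExecutable = "/usr/bin/R";  // hypothetical resolved value

    ProcessBuilder pb = new ProcessBuilder(Arrays.asList(rExecutable));
    // The submit options are handed to the R process through this environment variable.
    pb.environment().put("SPARKR_SUBMIT_ARGS",
        "\"--master\" \"foo\" \"--deploy-mode\" \"bar\" "
            + "\"--conf\" \"spark.r.shell.command=/usr/bin/R\" \"sparkr-shell\"");
    pb.inheritIO();
    Process r = pb.start();
    System.exit(r.waitFor());
  }
}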
diff --git a/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java b/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
index 16e5a22401..ad2e7a70c4 100644
--- a/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
+++ b/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
@@ -173,6 +173,24 @@ public class SparkSubmitCommandBuilderSuite extends BaseSuite {
}
@Test
+ public void testSparkRShell() throws Exception {
+ List<String> sparkSubmitArgs = Arrays.asList(
+ SparkSubmitCommandBuilder.SPARKR_SHELL,
+ "--master=foo",
+ "--deploy-mode=bar",
+ "--conf", "spark.r.shell.command=/usr/bin/R");
+
+ Map<String, String> env = new HashMap<>();
+ List<String> cmd = buildCommand(sparkSubmitArgs, env);
+ assertEquals("/usr/bin/R", cmd.get(cmd.size() - 1));
+ assertEquals(
+ String.format(
+ "\"%s\" \"foo\" \"%s\" \"bar\" \"--conf\" \"spark.r.shell.command=/usr/bin/R\" \"%s\"",
+ parser.MASTER, parser.DEPLOY_MODE, SparkSubmitCommandBuilder.SPARKR_SHELL_RESOURCE),
+ env.get("SPARKR_SUBMIT_ARGS"));
+ }
+
+ @Test
public void testExamplesRunner() throws Exception {
List<String> sparkSubmitArgs = Arrays.asList(
SparkSubmitCommandBuilder.RUN_EXAMPLE,