From f08db010d3ba4eff132f5c06a087b0c7c2e58576 Mon Sep 17 00:00:00 2001 From: kalpit Date: Wed, 6 Mar 2013 18:06:32 -0800 Subject: added SPARK_WORKER_INSTANCES: allows spawning multiple worker instances/processes on every slave machine --- conf/spark-env.sh.template | 1 + 1 file changed, 1 insertion(+) (limited to 'conf') diff --git a/conf/spark-env.sh.template b/conf/spark-env.sh.template index 6d71ec5691..37565ca827 100755 --- a/conf/spark-env.sh.template +++ b/conf/spark-env.sh.template @@ -12,6 +12,7 @@ # - SPARK_WORKER_CORES, to set the number of cores to use on this machine # - SPARK_WORKER_MEMORY, to set how much memory to use (e.g. 1000m, 2g) # - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT +# - SPARK_WORKER_INSTANCES, to set the number of worker instances/processes to be spawned on every slave machine # # Finally, Spark also relies on the following variables, but these can be set # on just the *master* (i.e. in your driver program), and will automatically -- cgit v1.2.3