author    Patrick Wendell <pwendell@apache.org>    2014-02-03 06:29:51 +0000
committer Patrick Wendell <pwendell@apache.org>    2014-02-03 06:29:51 +0000
commit    61203830f49cf7f5ce82f70ead2a155e256c02ee (patch)
tree      67d83dfa860156196303eaa1f4873e2fefffe904 /site/docs/0.9.0/api/pyspark/pyspark.conf.SparkConf-class.html
parent    49ab3963af9b1d79c73f749ad200f7be80785f9f (diff)
Bumping version in docs.
Diffstat (limited to 'site/docs/0.9.0/api/pyspark/pyspark.conf.SparkConf-class.html')
-rw-r--r--  site/docs/0.9.0/api/pyspark/pyspark.conf.SparkConf-class.html  447
1 file changed, 447 insertions, 0 deletions
diff --git a/site/docs/0.9.0/api/pyspark/pyspark.conf.SparkConf-class.html b/site/docs/0.9.0/api/pyspark/pyspark.conf.SparkConf-class.html
new file mode 100644
index 000000000..8ebc1b5c1
--- /dev/null
+++ b/site/docs/0.9.0/api/pyspark/pyspark.conf.SparkConf-class.html
@@ -0,0 +1,447 @@
+<?xml version="1.0" encoding="ascii"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+ "DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
+<head>
+ <title>pyspark.conf.SparkConf</title>
+ <link rel="stylesheet" href="epydoc.css" type="text/css" />
+ <script type="text/javascript" src="epydoc.js"></script>
+</head>
+
+<body bgcolor="white" text="black" link="blue" vlink="#204080"
+ alink="#204080">
+<!-- ==================== NAVIGATION BAR ==================== -->
+<table class="navbar" border="0" width="100%" cellpadding="0"
+ bgcolor="#a0c0ff" cellspacing="0">
+ <tr valign="middle">
+ <!-- Home link -->
+ <th>&nbsp;&nbsp;&nbsp;<a
+ href="pyspark-module.html">Home</a>&nbsp;&nbsp;&nbsp;</th>
+
+ <!-- Tree link -->
+ <th>&nbsp;&nbsp;&nbsp;<a
+ href="module-tree.html">Trees</a>&nbsp;&nbsp;&nbsp;</th>
+
+ <!-- Index link -->
+ <th>&nbsp;&nbsp;&nbsp;<a
+ href="identifier-index.html">Indices</a>&nbsp;&nbsp;&nbsp;</th>
+
+ <!-- Help link -->
+ <th>&nbsp;&nbsp;&nbsp;<a
+ href="help.html">Help</a>&nbsp;&nbsp;&nbsp;</th>
+
+ <!-- Project homepage -->
+ <th class="navbar" align="right" width="100%">
+ <table border="0" cellpadding="0" cellspacing="0">
+ <tr><th class="navbar" align="center"
+ ><a class="navbar" target="_top" href="http://spark-project.org">PySpark</a></th>
+ </tr></table></th>
+ </tr>
+</table>
+<table width="100%" cellpadding="0" cellspacing="0">
+ <tr valign="top">
+ <td width="100%">
+ <span class="breadcrumbs">
+ <a href="pyspark-module.html">Package&nbsp;pyspark</a> ::
+ <a href="pyspark.conf-module.html">Module&nbsp;conf</a> ::
+ Class&nbsp;SparkConf
+ </span>
+ </td>
+ <td>
+ <table cellpadding="0" cellspacing="0">
+ <!-- hide/show private -->
+ <tr><td align="right"><span class="options"
+ >[<a href="frames.html" target="_top">frames</a
+ >]&nbsp;|&nbsp;<a href="pyspark.conf.SparkConf-class.html"
+ target="_top">no&nbsp;frames</a>]</span></td></tr>
+ </table>
+ </td>
+ </tr>
+</table>
+<!-- ==================== CLASS DESCRIPTION ==================== -->
+<h1 class="epydoc">Class SparkConf</h1><p class="nomargin-top"><span class="codelink"><a href="pyspark.conf-pysrc.html#SparkConf">source&nbsp;code</a></span></p>
+<pre class="base-tree">
+object --+
+ |
+ <strong class="uidshort">SparkConf</strong>
+</pre>
+
+<hr />
+<p>Configuration for a Spark application. Used to set various Spark
+ parameters as key-value pairs.</p>
+ <p>Most of the time, you would create a SparkConf object with
+ <code>SparkConf()</code>, which will load values from
+ <code>spark.*</code> Java system properties as well. In this case, any
+ parameters you set directly on the <code>SparkConf</code> object take
+ priority over system properties.</p>
+ <p>For unit tests, you can also call <code>SparkConf(false)</code> to
+ skip loading external settings and get the same configuration no matter
+ what the system properties are.</p>
+ <p>All setter methods in this class support chaining. For example, you
+ can write <code>conf.setMaster(&quot;local&quot;).setAppName(&quot;My
+ app&quot;)</code>.</p>
+ <p>Note that once a SparkConf object is passed to Spark, it is cloned and
+ can no longer be modified by the user.</p>
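+  <p>A minimal usage sketch based on the description above: build a chained
+  configuration and hand it to a <code>SparkContext</code>. The
+  <code>&quot;local&quot;</code> master and the application name are placeholder
+  values, and the <code>SparkContext(conf=conf)</code> call is assumed from the
+  wider PySpark API rather than documented on this page.</p>
+<pre class="py-doctest">
+&gt;&gt;&gt; from pyspark.conf import SparkConf
+&gt;&gt;&gt; from pyspark.context import SparkContext
+&gt;&gt;&gt; # placeholder master URL and app name; setters return self, so calls chain
+&gt;&gt;&gt; conf = SparkConf().setMaster(&quot;local&quot;).setAppName(&quot;My app&quot;)
+&gt;&gt;&gt; sc = SparkContext(conf=conf)</pre>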
+
+<!-- ==================== INSTANCE METHODS ==================== -->
+<a name="section-InstanceMethods"></a>
+<table class="summary" border="1" cellpadding="3"
+ cellspacing="0" width="100%" bgcolor="white">
+<tr bgcolor="#70b0f0" class="table-header">
+ <td align="left" colspan="2" class="table-header">
+ <span class="table-header">Instance Methods</span></td>
+</tr>
+<tr>
+ <td width="15%" align="right" valign="top" class="summary">
+ <span class="summary-type">&nbsp;</span>
+ </td><td class="summary">
+ <table width="100%" cellpadding="0" cellspacing="0" border="0">
+ <tr>
+ <td><span class="summary-sig"><a href="pyspark.conf.SparkConf-class.html#__init__" class="summary-sig-name">__init__</a>(<span class="summary-sig-arg">self</span>,
+ <span class="summary-sig-arg">loadDefaults</span>=<span class="summary-sig-default">True</span>,
+ <span class="summary-sig-arg">_jvm</span>=<span class="summary-sig-default">None</span>)</span><br />
+ Create a new Spark configuration.</td>
+ <td align="right" valign="top">
+ <span class="codelink"><a href="pyspark.conf-pysrc.html#SparkConf.__init__">source&nbsp;code</a></span>
+
+ </td>
+ </tr>
+ </table>
+
+ </td>
+ </tr>
+<tr>
+ <td width="15%" align="right" valign="top" class="summary">
+ <span class="summary-type">&nbsp;</span>
+ </td><td class="summary">
+ <table width="100%" cellpadding="0" cellspacing="0" border="0">
+ <tr>
+ <td><span class="summary-sig"><a name="set"></a><span class="summary-sig-name">set</span>(<span class="summary-sig-arg">self</span>,
+ <span class="summary-sig-arg">key</span>,
+ <span class="summary-sig-arg">value</span>)</span><br />
+ Set a configuration property.</td>
+ <td align="right" valign="top">
+ <span class="codelink"><a href="pyspark.conf-pysrc.html#SparkConf.set">source&nbsp;code</a></span>
+
+ </td>
+ </tr>
+ </table>
+
+ </td>
+ </tr>
+<tr>
+ <td width="15%" align="right" valign="top" class="summary">
+ <span class="summary-type">&nbsp;</span>
+ </td><td class="summary">
+ <table width="100%" cellpadding="0" cellspacing="0" border="0">
+ <tr>
+ <td><span class="summary-sig"><a name="setMaster"></a><span class="summary-sig-name">setMaster</span>(<span class="summary-sig-arg">self</span>,
+ <span class="summary-sig-arg">value</span>)</span><br />
+ Set master URL to connect to.</td>
+ <td align="right" valign="top">
+ <span class="codelink"><a href="pyspark.conf-pysrc.html#SparkConf.setMaster">source&nbsp;code</a></span>
+
+ </td>
+ </tr>
+ </table>
+
+ </td>
+ </tr>
+<tr>
+ <td width="15%" align="right" valign="top" class="summary">
+ <span class="summary-type">&nbsp;</span>
+ </td><td class="summary">
+ <table width="100%" cellpadding="0" cellspacing="0" border="0">
+ <tr>
+ <td><span class="summary-sig"><a name="setAppName"></a><span class="summary-sig-name">setAppName</span>(<span class="summary-sig-arg">self</span>,
+ <span class="summary-sig-arg">value</span>)</span><br />
+ Set application name.</td>
+ <td align="right" valign="top">
+ <span class="codelink"><a href="pyspark.conf-pysrc.html#SparkConf.setAppName">source&nbsp;code</a></span>
+
+ </td>
+ </tr>
+ </table>
+
+ </td>
+ </tr>
+<tr>
+ <td width="15%" align="right" valign="top" class="summary">
+ <span class="summary-type">&nbsp;</span>
+ </td><td class="summary">
+ <table width="100%" cellpadding="0" cellspacing="0" border="0">
+ <tr>
+ <td><span class="summary-sig"><a name="setSparkHome"></a><span class="summary-sig-name">setSparkHome</span>(<span class="summary-sig-arg">self</span>,
+ <span class="summary-sig-arg">value</span>)</span><br />
+ Set path where Spark is installed on worker nodes.</td>
+ <td align="right" valign="top">
+ <span class="codelink"><a href="pyspark.conf-pysrc.html#SparkConf.setSparkHome">source&nbsp;code</a></span>
+
+ </td>
+ </tr>
+ </table>
+
+ </td>
+ </tr>
+<tr>
+ <td width="15%" align="right" valign="top" class="summary">
+ <span class="summary-type">&nbsp;</span>
+ </td><td class="summary">
+ <table width="100%" cellpadding="0" cellspacing="0" border="0">
+ <tr>
+ <td><span class="summary-sig"><a name="setExecutorEnv"></a><span class="summary-sig-name">setExecutorEnv</span>(<span class="summary-sig-arg">self</span>,
+ <span class="summary-sig-arg">key</span>=<span class="summary-sig-default">None</span>,
+ <span class="summary-sig-arg">value</span>=<span class="summary-sig-default">None</span>,
+ <span class="summary-sig-arg">pairs</span>=<span class="summary-sig-default">None</span>)</span><br />
+ Set an environment variable to be passed to executors.</td>
+ <td align="right" valign="top">
+ <span class="codelink"><a href="pyspark.conf-pysrc.html#SparkConf.setExecutorEnv">source&nbsp;code</a></span>
+
+ </td>
+ </tr>
+ </table>
+
+ </td>
+ </tr>
+<tr>
+ <td width="15%" align="right" valign="top" class="summary">
+ <span class="summary-type">&nbsp;</span>
+ </td><td class="summary">
+ <table width="100%" cellpadding="0" cellspacing="0" border="0">
+ <tr>
+ <td><span class="summary-sig"><a href="pyspark.conf.SparkConf-class.html#setAll" class="summary-sig-name">setAll</a>(<span class="summary-sig-arg">self</span>,
+ <span class="summary-sig-arg">pairs</span>)</span><br />
+ Set multiple parameters, passed as a list of key-value pairs.</td>
+ <td align="right" valign="top">
+ <span class="codelink"><a href="pyspark.conf-pysrc.html#SparkConf.setAll">source&nbsp;code</a></span>
+
+ </td>
+ </tr>
+ </table>
+
+ </td>
+ </tr>
+<tr>
+ <td width="15%" align="right" valign="top" class="summary">
+ <span class="summary-type">&nbsp;</span>
+ </td><td class="summary">
+ <table width="100%" cellpadding="0" cellspacing="0" border="0">
+ <tr>
+ <td><span class="summary-sig"><a name="get"></a><span class="summary-sig-name">get</span>(<span class="summary-sig-arg">self</span>,
+ <span class="summary-sig-arg">key</span>,
+ <span class="summary-sig-arg">defaultValue</span>=<span class="summary-sig-default">None</span>)</span><br />
+ Get the configured value for some key, or return a default otherwise.</td>
+ <td align="right" valign="top">
+ <span class="codelink"><a href="pyspark.conf-pysrc.html#SparkConf.get">source&nbsp;code</a></span>
+
+ </td>
+ </tr>
+ </table>
+
+ </td>
+ </tr>
+<tr>
+ <td width="15%" align="right" valign="top" class="summary">
+ <span class="summary-type">&nbsp;</span>
+ </td><td class="summary">
+ <table width="100%" cellpadding="0" cellspacing="0" border="0">
+ <tr>
+ <td><span class="summary-sig"><a name="getAll"></a><span class="summary-sig-name">getAll</span>(<span class="summary-sig-arg">self</span>)</span><br />
+ Get all values as a list of key-value pairs.</td>
+ <td align="right" valign="top">
+ <span class="codelink"><a href="pyspark.conf-pysrc.html#SparkConf.getAll">source&nbsp;code</a></span>
+
+ </td>
+ </tr>
+ </table>
+
+ </td>
+ </tr>
+<tr>
+ <td width="15%" align="right" valign="top" class="summary">
+ <span class="summary-type">&nbsp;</span>
+ </td><td class="summary">
+ <table width="100%" cellpadding="0" cellspacing="0" border="0">
+ <tr>
+ <td><span class="summary-sig"><a name="contains"></a><span class="summary-sig-name">contains</span>(<span class="summary-sig-arg">self</span>,
+ <span class="summary-sig-arg">key</span>)</span><br />
+ Does this configuration contain a given key?</td>
+ <td align="right" valign="top">
+ <span class="codelink"><a href="pyspark.conf-pysrc.html#SparkConf.contains">source&nbsp;code</a></span>
+
+ </td>
+ </tr>
+ </table>
+
+ </td>
+ </tr>
+<tr>
+ <td width="15%" align="right" valign="top" class="summary">
+ <span class="summary-type">&nbsp;</span>
+ </td><td class="summary">
+ <table width="100%" cellpadding="0" cellspacing="0" border="0">
+ <tr>
+ <td><span class="summary-sig"><a name="toDebugString"></a><span class="summary-sig-name">toDebugString</span>(<span class="summary-sig-arg">self</span>)</span><br />
+ Returns a printable version of the configuration, as a list of
+ key=value pairs, one per line.</td>
+ <td align="right" valign="top">
+ <span class="codelink"><a href="pyspark.conf-pysrc.html#SparkConf.toDebugString">source&nbsp;code</a></span>
+
+ </td>
+ </tr>
+ </table>
+
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2" class="summary">
+ <p class="indent-wrapped-lines"><b>Inherited from <code>object</code></b>:
+ <code>__delattr__</code>,
+ <code>__format__</code>,
+ <code>__getattribute__</code>,
+ <code>__hash__</code>,
+ <code>__new__</code>,
+ <code>__reduce__</code>,
+ <code>__reduce_ex__</code>,
+ <code>__repr__</code>,
+ <code>__setattr__</code>,
+ <code>__sizeof__</code>,
+ <code>__str__</code>,
+ <code>__subclasshook__</code>
+ </p>
+ </td>
+ </tr>
+</table>
+<!-- ==================== PROPERTIES ==================== -->
+<a name="section-Properties"></a>
+<table class="summary" border="1" cellpadding="3"
+ cellspacing="0" width="100%" bgcolor="white">
+<tr bgcolor="#70b0f0" class="table-header">
+ <td align="left" colspan="2" class="table-header">
+ <span class="table-header">Properties</span></td>
+</tr>
+ <tr>
+ <td colspan="2" class="summary">
+ <p class="indent-wrapped-lines"><b>Inherited from <code>object</code></b>:
+ <code>__class__</code>
+ </p>
+ </td>
+ </tr>
+</table>
+<!-- ==================== METHOD DETAILS ==================== -->
+<a name="section-MethodDetails"></a>
+<table class="details" border="1" cellpadding="3"
+ cellspacing="0" width="100%" bgcolor="white">
+<tr bgcolor="#70b0f0" class="table-header">
+ <td align="left" colspan="2" class="table-header">
+ <span class="table-header">Method Details</span></td>
+</tr>
+</table>
+<a name="__init__"></a>
+<div>
+<table class="details" border="1" cellpadding="3"
+ cellspacing="0" width="100%" bgcolor="white">
+<tr><td>
+ <table width="100%" cellpadding="0" cellspacing="0" border="0">
+ <tr valign="top"><td>
+ <h3 class="epydoc"><span class="sig"><span class="sig-name">__init__</span>(<span class="sig-arg">self</span>,
+ <span class="sig-arg">loadDefaults</span>=<span class="sig-default">True</span>,
+ <span class="sig-arg">_jvm</span>=<span class="sig-default">None</span>)</span>
+ <br /><em class="fname">(Constructor)</em>
+ </h3>
+ </td><td align="right" valign="top"
+ ><span class="codelink"><a href="pyspark.conf-pysrc.html#SparkConf.__init__">source&nbsp;code</a></span>&nbsp;
+ </td>
+ </tr></table>
+
+ <p>Create a new Spark configuration.</p>
+ <dl class="fields">
+ <dt>Parameters:</dt>
+ <dd><ul class="nomargin-top">
+ <li><strong class="pname"><code>loadDefaults</code></strong> - whether to load values from Java system properties (True by
+ default)</li>
+ <li><strong class="pname"><code>_jvm</code></strong> - internal parameter used to pass a handle to the Java VM; does not
+ need to be set by users</li>
+ </ul></dd>
+ <dt>Overrides:
+ object.__init__
+ </dt>
+ </dl>
+</td></tr></table>
+</div>
+<a name="setAll"></a>
+<div>
+<table class="details" border="1" cellpadding="3"
+ cellspacing="0" width="100%" bgcolor="white">
+<tr><td>
+ <table width="100%" cellpadding="0" cellspacing="0" border="0">
+ <tr valign="top"><td>
+ <h3 class="epydoc"><span class="sig"><span class="sig-name">setAll</span>(<span class="sig-arg">self</span>,
+ <span class="sig-arg">pairs</span>)</span>
+ </h3>
+ </td><td align="right" valign="top"
+ ><span class="codelink"><a href="pyspark.conf-pysrc.html#SparkConf.setAll">source&nbsp;code</a></span>&nbsp;
+ </td>
+ </tr></table>
+
+ <p>Set multiple parameters, passed as a list of key-value pairs.</p>
+ <dl class="fields">
+ <dt>Parameters:</dt>
+ <dd><ul class="nomargin-top">
+ <li><strong class="pname"><code>pairs</code></strong> - list of key-value pairs to set</li>
+ </ul></dd>
+ </dl>
+</td></tr></table>
+</div>
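+  <p>A brief sketch of the call, assuming a plain list of key-value tuples;
+  the configuration keys shown are illustrative, not defaults set by this
+  class.</p>
+<pre class="py-doctest">
+&gt;&gt;&gt; # each (key, value) pair is applied in order via set(); setAll() returns self
+&gt;&gt;&gt; conf = SparkConf().setAll([(&quot;spark.executor.memory&quot;, &quot;1g&quot;), (&quot;spark.cores.max&quot;, &quot;4&quot;)])</pre>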
+<br />
+<!-- ==================== NAVIGATION BAR ==================== -->
+<table class="navbar" border="0" width="100%" cellpadding="0"
+ bgcolor="#a0c0ff" cellspacing="0">
+ <tr valign="middle">
+ <!-- Home link -->
+ <th>&nbsp;&nbsp;&nbsp;<a
+ href="pyspark-module.html">Home</a>&nbsp;&nbsp;&nbsp;</th>
+
+ <!-- Tree link -->
+ <th>&nbsp;&nbsp;&nbsp;<a
+ href="module-tree.html">Trees</a>&nbsp;&nbsp;&nbsp;</th>
+
+ <!-- Index link -->
+ <th>&nbsp;&nbsp;&nbsp;<a
+ href="identifier-index.html">Indices</a>&nbsp;&nbsp;&nbsp;</th>
+
+ <!-- Help link -->
+ <th>&nbsp;&nbsp;&nbsp;<a
+ href="help.html">Help</a>&nbsp;&nbsp;&nbsp;</th>
+
+ <!-- Project homepage -->
+ <th class="navbar" align="right" width="100%">
+ <table border="0" cellpadding="0" cellspacing="0">
+ <tr><th class="navbar" align="center"
+ ><a class="navbar" target="_top" href="http://spark-project.org">PySpark</a></th>
+ </tr></table></th>
+ </tr>
+</table>
+<table border="0" cellpadding="0" cellspacing="0" width="100%%">
+ <tr>
+ <td align="left" class="footer">
+ Generated by Epydoc 3.0.1 on Sun Feb 2 22:20:29 2014
+ </td>
+ <td align="right" class="footer">
+ <a target="mainFrame" href="http://epydoc.sourceforge.net"
+ >http://epydoc.sourceforge.net</a>
+ </td>
+ </tr>
+</table>
+
+<script type="text/javascript">
+ <!--
+ // Private objects are initially displayed (because if
+ // javascript is turned off then we want them to be
+ // visible); but by default, we want to hide them. So hide
+ // them unless we have a cookie that says to show them.
+ checkCookie();
+ // -->
+</script>
+</body>
+</html>