<?xml version="1.0" encoding="ascii"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
          "DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
  <title>pyspark.conf</title>
  <link rel="stylesheet" href="epydoc.css" type="text/css" />
  <script type="text/javascript" src="epydoc.js"></script>
</head>

<body bgcolor="white" text="black" link="blue" vlink="#204080"
      alink="#204080">
<!-- ==================== NAVIGATION BAR ==================== -->
<table class="navbar" border="0" width="100%" cellpadding="0"
       bgcolor="#a0c0ff" cellspacing="0">
  <tr valign="middle">
  <!-- Home link -->
      <th>&nbsp;&nbsp;&nbsp;<a
        href="pyspark-module.html">Home</a>&nbsp;&nbsp;&nbsp;</th>

  <!-- Tree link -->
      <th>&nbsp;&nbsp;&nbsp;<a
        href="module-tree.html">Trees</a>&nbsp;&nbsp;&nbsp;</th>

  <!-- Index link -->
      <th>&nbsp;&nbsp;&nbsp;<a
        href="identifier-index.html">Indices</a>&nbsp;&nbsp;&nbsp;</th>

  <!-- Help link -->
      <th>&nbsp;&nbsp;&nbsp;<a
        href="help.html">Help</a>&nbsp;&nbsp;&nbsp;</th>

  <!-- Project homepage -->
      <th class="navbar" align="right" width="100%">
        <table border="0" cellpadding="0" cellspacing="0">
          <tr><th class="navbar" align="center"
            ><a class="navbar" target="_top" href="http://spark-project.org">PySpark</a></th>
          </tr></table></th>
  </tr>
</table>
<table width="100%" cellpadding="0" cellspacing="0">
  <tr valign="top">
    <td width="100%">
      <span class="breadcrumbs">
        <a href="pyspark-module.html">Package&nbsp;pyspark</a> ::
        Module&nbsp;conf
      </span>
    </td>
    <td>
      <table cellpadding="0" cellspacing="0">
        <!-- hide/show private -->
        <tr><td align="right"><span class="options"
            >[<a href="frames.html" target="_top">frames</a
            >]&nbsp;|&nbsp;<a href="pyspark.conf-pysrc.html"
            target="_top">no&nbsp;frames</a>]</span></td></tr>
      </table>
    </td>
  </tr>
</table>
<h1 class="epydoc">Source Code for <a href="pyspark.conf-module.html">Module pyspark.conf</a></h1>
<pre class="py-src">
<a name="L1"></a><tt class="py-lineno">  1</tt>  <tt class="py-line"><tt class="py-comment">#</tt> </tt>
<a name="L2"></a><tt class="py-lineno">  2</tt>  <tt class="py-line"><tt class="py-comment"># Licensed to the Apache Software Foundation (ASF) under one or more</tt> </tt>
<a name="L3"></a><tt class="py-lineno">  3</tt>  <tt class="py-line"><tt class="py-comment"># contributor license agreements.  See the NOTICE file distributed with</tt> </tt>
<a name="L4"></a><tt class="py-lineno">  4</tt>  <tt class="py-line"><tt class="py-comment"># this work for additional information regarding copyright ownership.</tt> </tt>
<a name="L5"></a><tt class="py-lineno">  5</tt>  <tt class="py-line"><tt class="py-comment"># The ASF licenses this file to You under the Apache License, Version 2.0</tt> </tt>
<a name="L6"></a><tt class="py-lineno">  6</tt>  <tt class="py-line"><tt class="py-comment"># (the "License"); you may not use this file except in compliance with</tt> </tt>
<a name="L7"></a><tt class="py-lineno">  7</tt>  <tt class="py-line"><tt class="py-comment"># the License.  You may obtain a copy of the License at</tt> </tt>
<a name="L8"></a><tt class="py-lineno">  8</tt>  <tt class="py-line"><tt class="py-comment">#</tt> </tt>
<a name="L9"></a><tt class="py-lineno">  9</tt>  <tt class="py-line"><tt class="py-comment">#    http://www.apache.org/licenses/LICENSE-2.0</tt> </tt>
<a name="L10"></a><tt class="py-lineno"> 10</tt>  <tt class="py-line"><tt class="py-comment">#</tt> </tt>
<a name="L11"></a><tt class="py-lineno"> 11</tt>  <tt class="py-line"><tt class="py-comment"># Unless required by applicable law or agreed to in writing, software</tt> </tt>
<a name="L12"></a><tt class="py-lineno"> 12</tt>  <tt class="py-line"><tt class="py-comment"># distributed under the License is distributed on an "AS IS" BASIS,</tt> </tt>
<a name="L13"></a><tt class="py-lineno"> 13</tt>  <tt class="py-line"><tt class="py-comment"># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</tt> </tt>
<a name="L14"></a><tt class="py-lineno"> 14</tt>  <tt class="py-line"><tt class="py-comment"># See the License for the specific language governing permissions and</tt> </tt>
<a name="L15"></a><tt class="py-lineno"> 15</tt>  <tt class="py-line"><tt class="py-comment"># limitations under the License.</tt> </tt>
<a name="L16"></a><tt class="py-lineno"> 16</tt>  <tt class="py-line"><tt class="py-comment">#</tt> </tt>
<a name="L17"></a><tt class="py-lineno"> 17</tt>  <tt class="py-line"> </tt>
<a name="L18"></a><tt class="py-lineno"> 18</tt>  <tt class="py-line"><tt class="py-docstring">"""</tt> </tt>
<a name="L19"></a><tt class="py-lineno"> 19</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; from pyspark.conf import SparkConf</tt> </tt>
<a name="L20"></a><tt class="py-lineno"> 20</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; from pyspark.context import SparkContext</tt> </tt>
<a name="L21"></a><tt class="py-lineno"> 21</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; conf = SparkConf()</tt> </tt>
<a name="L22"></a><tt class="py-lineno"> 22</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; conf.setMaster("local").setAppName("My app")</tt> </tt>
<a name="L23"></a><tt class="py-lineno"> 23</tt>  <tt class="py-line"><tt class="py-docstring">&lt;pyspark.conf.SparkConf object at ...&gt;</tt> </tt>
<a name="L24"></a><tt class="py-lineno"> 24</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; conf.get("spark.master")</tt> </tt>
<a name="L25"></a><tt class="py-lineno"> 25</tt>  <tt class="py-line"><tt class="py-docstring">u'local'</tt> </tt>
<a name="L26"></a><tt class="py-lineno"> 26</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; conf.get("spark.app.name")</tt> </tt>
<a name="L27"></a><tt class="py-lineno"> 27</tt>  <tt class="py-line"><tt class="py-docstring">u'My app'</tt> </tt>
<a name="L28"></a><tt class="py-lineno"> 28</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; sc = SparkContext(conf=conf)</tt> </tt>
<a name="L29"></a><tt class="py-lineno"> 29</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; sc.master</tt> </tt>
<a name="L30"></a><tt class="py-lineno"> 30</tt>  <tt class="py-line"><tt class="py-docstring">u'local'</tt> </tt>
<a name="L31"></a><tt class="py-lineno"> 31</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; sc.appName</tt> </tt>
<a name="L32"></a><tt class="py-lineno"> 32</tt>  <tt class="py-line"><tt class="py-docstring">u'My app'</tt> </tt>
<a name="L33"></a><tt class="py-lineno"> 33</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; sc.sparkHome == None</tt> </tt>
<a name="L34"></a><tt class="py-lineno"> 34</tt>  <tt class="py-line"><tt class="py-docstring">True</tt> </tt>
<a name="L35"></a><tt class="py-lineno"> 35</tt>  <tt class="py-line"><tt class="py-docstring"></tt> </tt>
<a name="L36"></a><tt class="py-lineno"> 36</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; conf = SparkConf()</tt> </tt>
<a name="L37"></a><tt class="py-lineno"> 37</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; conf.setSparkHome("/path")</tt> </tt>
<a name="L38"></a><tt class="py-lineno"> 38</tt>  <tt class="py-line"><tt class="py-docstring">&lt;pyspark.conf.SparkConf object at ...&gt;</tt> </tt>
<a name="L39"></a><tt class="py-lineno"> 39</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; conf.get("spark.home")</tt> </tt>
<a name="L40"></a><tt class="py-lineno"> 40</tt>  <tt class="py-line"><tt class="py-docstring">u'/path'</tt> </tt>
<a name="L41"></a><tt class="py-lineno"> 41</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; conf.setExecutorEnv("VAR1", "value1")</tt> </tt>
<a name="L42"></a><tt class="py-lineno"> 42</tt>  <tt class="py-line"><tt class="py-docstring">&lt;pyspark.conf.SparkConf object at ...&gt;</tt> </tt>
<a name="L43"></a><tt class="py-lineno"> 43</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; conf.setExecutorEnv(pairs = [("VAR3", "value3"), ("VAR4", "value4")])</tt> </tt>
<a name="L44"></a><tt class="py-lineno"> 44</tt>  <tt class="py-line"><tt class="py-docstring">&lt;pyspark.conf.SparkConf object at ...&gt;</tt> </tt>
<a name="L45"></a><tt class="py-lineno"> 45</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; conf.get("spark.executorEnv.VAR1")</tt> </tt>
<a name="L46"></a><tt class="py-lineno"> 46</tt>  <tt class="py-line"><tt class="py-docstring">u'value1'</tt> </tt>
<a name="L47"></a><tt class="py-lineno"> 47</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; print conf.toDebugString()</tt> </tt>
<a name="L48"></a><tt class="py-lineno"> 48</tt>  <tt class="py-line"><tt class="py-docstring">spark.executorEnv.VAR1=value1</tt> </tt>
<a name="L49"></a><tt class="py-lineno"> 49</tt>  <tt class="py-line"><tt class="py-docstring">spark.executorEnv.VAR3=value3</tt> </tt>
<a name="L50"></a><tt class="py-lineno"> 50</tt>  <tt class="py-line"><tt class="py-docstring">spark.executorEnv.VAR4=value4</tt> </tt>
<a name="L51"></a><tt class="py-lineno"> 51</tt>  <tt class="py-line"><tt class="py-docstring">spark.home=/path</tt> </tt>
<a name="L52"></a><tt class="py-lineno"> 52</tt>  <tt class="py-line"><tt class="py-docstring">&gt;&gt;&gt; sorted(conf.getAll(), key=lambda p: p[0])</tt> </tt>
<a name="L53"></a><tt class="py-lineno"> 53</tt>  <tt class="py-line"><tt class="py-docstring">[(u'spark.executorEnv.VAR1', u'value1'), (u'spark.executorEnv.VAR3', u'value3'), (u'spark.executorEnv.VAR4', u'value4'), (u'spark.home', u'/path')]</tt> </tt>
<a name="L54"></a><tt class="py-lineno"> 54</tt>  <tt class="py-line"><tt class="py-docstring">"""</tt> </tt>
<a name="L55"></a><tt class="py-lineno"> 55</tt>  <tt class="py-line"> </tt>
<a name="L56"></a><tt class="py-lineno"> 56</tt>  <tt class="py-line"> </tt>
<a name="SparkConf"></a><div id="SparkConf-def"><a name="L57"></a><tt class="py-lineno"> 57</tt> <a class="py-toggle" href="#" id="SparkConf-toggle" onclick="return toggle('SparkConf');">-</a><tt class="py-line"><tt class="py-keyword">class</tt> <a class="py-def-name" href="pyspark.conf.SparkConf-class.html">SparkConf</a><tt class="py-op">(</tt><tt class="py-base-class">object</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
</div><div id="SparkConf-collapsed" style="display:none;" pad="+++" indent="++++"></div><div id="SparkConf-expanded"><a name="L58"></a><tt class="py-lineno"> 58</tt>  <tt class="py-line">    <tt class="py-docstring">"""</tt> </tt>
<a name="L59"></a><tt class="py-lineno"> 59</tt>  <tt class="py-line"><tt class="py-docstring">    Configuration for a Spark application. Used to set various Spark</tt> </tt>
<a name="L60"></a><tt class="py-lineno"> 60</tt>  <tt class="py-line"><tt class="py-docstring">    parameters as key-value pairs.</tt> </tt>
<a name="L61"></a><tt class="py-lineno"> 61</tt>  <tt class="py-line"><tt class="py-docstring"></tt> </tt>
<a name="L62"></a><tt class="py-lineno"> 62</tt>  <tt class="py-line"><tt class="py-docstring">    Most of the time, you would create a SparkConf object with</tt> </tt>
<a name="L63"></a><tt class="py-lineno"> 63</tt>  <tt class="py-line"><tt class="py-docstring">    C{SparkConf()}, which will load values from C{spark.*} Java system</tt> </tt>
<a name="L64"></a><tt class="py-lineno"> 64</tt>  <tt class="py-line"><tt class="py-docstring">    properties as well. In this case, any parameters you set directly on</tt> </tt>
<a name="L65"></a><tt class="py-lineno"> 65</tt>  <tt class="py-line"><tt class="py-docstring">    the C{SparkConf} object take priority over system properties.</tt> </tt>
<a name="L66"></a><tt class="py-lineno"> 66</tt>  <tt class="py-line"><tt class="py-docstring"></tt> </tt>
<a name="L67"></a><tt class="py-lineno"> 67</tt>  <tt class="py-line"><tt class="py-docstring">    For unit tests, you can also call C{SparkConf(false)} to skip</tt> </tt>
<a name="L68"></a><tt class="py-lineno"> 68</tt>  <tt class="py-line"><tt class="py-docstring">    loading external settings and get the same configuration no matter</tt> </tt>
<a name="L69"></a><tt class="py-lineno"> 69</tt>  <tt class="py-line"><tt class="py-docstring">    what the system properties are.</tt> </tt>
<a name="L70"></a><tt class="py-lineno"> 70</tt>  <tt class="py-line"><tt class="py-docstring"></tt> </tt>
<a name="L71"></a><tt class="py-lineno"> 71</tt>  <tt class="py-line"><tt class="py-docstring">    All setter methods in this class support chaining. For example,</tt> </tt>
<a name="L72"></a><tt class="py-lineno"> 72</tt>  <tt class="py-line"><tt class="py-docstring">    you can write C{conf.setMaster("local").setAppName("My app")}.</tt> </tt>
<a name="L73"></a><tt class="py-lineno"> 73</tt>  <tt class="py-line"><tt class="py-docstring"></tt> </tt>
<a name="L74"></a><tt class="py-lineno"> 74</tt>  <tt class="py-line"><tt class="py-docstring">    Note that once a SparkConf object is passed to Spark, it is cloned</tt> </tt>
<a name="L75"></a><tt class="py-lineno"> 75</tt>  <tt class="py-line"><tt class="py-docstring">    and can no longer be modified by the user.</tt> </tt>
<a name="L76"></a><tt class="py-lineno"> 76</tt>  <tt class="py-line"><tt class="py-docstring">    """</tt> </tt>
<a name="L77"></a><tt class="py-lineno"> 77</tt>  <tt class="py-line"> </tt>
<a name="SparkConf.__init__"></a><div id="SparkConf.__init__-def"><a name="L78"></a><tt class="py-lineno"> 78</tt> <a class="py-toggle" href="#" id="SparkConf.__init__-toggle" onclick="return toggle('SparkConf.__init__');">-</a><tt class="py-line">    <tt class="py-keyword">def</tt> <a class="py-def-name" href="pyspark.conf.SparkConf-class.html#__init__">__init__</a><tt class="py-op">(</tt><tt class="py-param">self</tt><tt class="py-op">,</tt> <tt class="py-param">loadDefaults</tt><tt class="py-op">=</tt><tt class="py-name">True</tt><tt class="py-op">,</tt> <tt class="py-param">_jvm</tt><tt class="py-op">=</tt><tt class="py-name">None</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
</div><div id="SparkConf.__init__-collapsed" style="display:none;" pad="+++" indent="++++++++"></div><div id="SparkConf.__init__-expanded"><a name="L79"></a><tt class="py-lineno"> 79</tt>  <tt class="py-line">        <tt class="py-docstring">"""</tt> </tt>
<a name="L80"></a><tt class="py-lineno"> 80</tt>  <tt class="py-line"><tt class="py-docstring">        Create a new Spark configuration.</tt> </tt>
<a name="L81"></a><tt class="py-lineno"> 81</tt>  <tt class="py-line"><tt class="py-docstring"></tt> </tt>
<a name="L82"></a><tt class="py-lineno"> 82</tt>  <tt class="py-line"><tt class="py-docstring">        @param loadDefaults: whether to load values from Java system</tt> </tt>
<a name="L83"></a><tt class="py-lineno"> 83</tt>  <tt class="py-line"><tt class="py-docstring">               properties (True by default)</tt> </tt>
<a name="L84"></a><tt class="py-lineno"> 84</tt>  <tt class="py-line"><tt class="py-docstring">        @param _jvm: internal parameter used to pass a handle to the</tt> </tt>
<a name="L85"></a><tt class="py-lineno"> 85</tt>  <tt class="py-line"><tt class="py-docstring">               Java VM; does not need to be set by users</tt> </tt>
<a name="L86"></a><tt class="py-lineno"> 86</tt>  <tt class="py-line"><tt class="py-docstring">        """</tt> </tt>
<a name="L87"></a><tt class="py-lineno"> 87</tt>  <tt class="py-line">        <tt class="py-keyword">from</tt> <tt id="link-0" class="py-name" targets="Package pyspark=pyspark-module.html"><a title="pyspark" class="py-name" href="#" onclick="return doclink('link-0', 'pyspark', 'link-0');">pyspark</a></tt><tt class="py-op">.</tt><tt id="link-1" class="py-name" targets="Module pyspark.context=pyspark.context-module.html,Method pyspark.rdd.RDD.context()=pyspark.rdd.RDD-class.html#context"><a title="pyspark.context
pyspark.rdd.RDD.context" class="py-name" href="#" onclick="return doclink('link-1', 'context', 'link-1');">context</a></tt> <tt class="py-keyword">import</tt> <tt id="link-2" class="py-name" targets="Class pyspark.context.SparkContext=pyspark.context.SparkContext-class.html"><a title="pyspark.context.SparkContext" class="py-name" href="#" onclick="return doclink('link-2', 'SparkContext', 'link-2');">SparkContext</a></tt> </tt>
<a name="L88"></a><tt class="py-lineno"> 88</tt>  <tt class="py-line">        <tt id="link-3" class="py-name"><a title="pyspark.context.SparkContext" class="py-name" href="#" onclick="return doclink('link-3', 'SparkContext', 'link-2');">SparkContext</a></tt><tt class="py-op">.</tt><tt class="py-name">_ensure_initialized</tt><tt class="py-op">(</tt><tt class="py-op">)</tt> </tt>
<a name="L89"></a><tt class="py-lineno"> 89</tt>  <tt class="py-line">        <tt id="link-4" class="py-name" targets="Variable pyspark.context.SparkContext._jvm=pyspark.context.SparkContext-class.html#_jvm"><a title="pyspark.context.SparkContext._jvm" class="py-name" href="#" onclick="return doclink('link-4', '_jvm', 'link-4');">_jvm</a></tt> <tt class="py-op">=</tt> <tt id="link-5" class="py-name"><a title="pyspark.context.SparkContext._jvm" class="py-name" href="#" onclick="return doclink('link-5', '_jvm', 'link-4');">_jvm</a></tt> <tt class="py-keyword">or</tt> <tt id="link-6" class="py-name"><a title="pyspark.context.SparkContext" class="py-name" href="#" onclick="return doclink('link-6', 'SparkContext', 'link-2');">SparkContext</a></tt><tt class="py-op">.</tt><tt id="link-7" class="py-name"><a title="pyspark.context.SparkContext._jvm" class="py-name" href="#" onclick="return doclink('link-7', '_jvm', 'link-4');">_jvm</a></tt> </tt>
<a name="L90"></a><tt class="py-lineno"> 90</tt>  <tt class="py-line">        <tt class="py-name">self</tt><tt class="py-op">.</tt><tt class="py-name">_jconf</tt> <tt class="py-op">=</tt> <tt id="link-8" class="py-name"><a title="pyspark.context.SparkContext._jvm" class="py-name" href="#" onclick="return doclink('link-8', '_jvm', 'link-4');">_jvm</a></tt><tt class="py-op">.</tt><tt id="link-9" class="py-name" targets="Class pyspark.conf.SparkConf=pyspark.conf.SparkConf-class.html"><a title="pyspark.conf.SparkConf" class="py-name" href="#" onclick="return doclink('link-9', 'SparkConf', 'link-9');">SparkConf</a></tt><tt class="py-op">(</tt><tt class="py-name">loadDefaults</tt><tt class="py-op">)</tt> </tt>
</div><a name="L91"></a><tt class="py-lineno"> 91</tt>  <tt class="py-line"> </tt>
<a name="SparkConf.set"></a><div id="SparkConf.set-def"><a name="L92"></a><tt class="py-lineno"> 92</tt> <a class="py-toggle" href="#" id="SparkConf.set-toggle" onclick="return toggle('SparkConf.set');">-</a><tt class="py-line">    <tt class="py-keyword">def</tt> <a class="py-def-name" href="pyspark.conf.SparkConf-class.html#set">set</a><tt class="py-op">(</tt><tt class="py-param">self</tt><tt class="py-op">,</tt> <tt class="py-param">key</tt><tt class="py-op">,</tt> <tt class="py-param">value</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
</div><div id="SparkConf.set-collapsed" style="display:none;" pad="+++" indent="++++++++"></div><div id="SparkConf.set-expanded"><a name="L93"></a><tt class="py-lineno"> 93</tt>  <tt class="py-line">        <tt class="py-docstring">"""Set a configuration property."""</tt> </tt>
<a name="L94"></a><tt class="py-lineno"> 94</tt>  <tt class="py-line">        <tt class="py-name">self</tt><tt class="py-op">.</tt><tt class="py-name">_jconf</tt><tt class="py-op">.</tt><tt id="link-10" class="py-name" targets="Method pyspark.conf.SparkConf.set()=pyspark.conf.SparkConf-class.html#set"><a title="pyspark.conf.SparkConf.set" class="py-name" href="#" onclick="return doclink('link-10', 'set', 'link-10');">set</a></tt><tt class="py-op">(</tt><tt class="py-name">key</tt><tt class="py-op">,</tt> <tt class="py-name">unicode</tt><tt class="py-op">(</tt><tt id="link-11" class="py-name" targets="Method pyspark.accumulators.Accumulator.value()=pyspark.accumulators.Accumulator-class.html#value"><a title="pyspark.accumulators.Accumulator.value" class="py-name" href="#" onclick="return doclink('link-11', 'value', 'link-11');">value</a></tt><tt class="py-op">)</tt><tt class="py-op">)</tt> </tt>
<a name="L95"></a><tt class="py-lineno"> 95</tt>  <tt class="py-line">        <tt class="py-keyword">return</tt> <tt class="py-name">self</tt> </tt>
</div><a name="L96"></a><tt class="py-lineno"> 96</tt>  <tt class="py-line"> </tt>
<a name="SparkConf.setMaster"></a><div id="SparkConf.setMaster-def"><a name="L97"></a><tt class="py-lineno"> 97</tt> <a class="py-toggle" href="#" id="SparkConf.setMaster-toggle" onclick="return toggle('SparkConf.setMaster');">-</a><tt class="py-line">    <tt class="py-keyword">def</tt> <a class="py-def-name" href="pyspark.conf.SparkConf-class.html#setMaster">setMaster</a><tt class="py-op">(</tt><tt class="py-param">self</tt><tt class="py-op">,</tt> <tt class="py-param">value</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
</div><div id="SparkConf.setMaster-collapsed" style="display:none;" pad="+++" indent="++++++++"></div><div id="SparkConf.setMaster-expanded"><a name="L98"></a><tt class="py-lineno"> 98</tt>  <tt class="py-line">        <tt class="py-docstring">"""Set master URL to connect to."""</tt> </tt>
<a name="L99"></a><tt class="py-lineno"> 99</tt>  <tt class="py-line">        <tt class="py-name">self</tt><tt class="py-op">.</tt><tt class="py-name">_jconf</tt><tt class="py-op">.</tt><tt id="link-12" class="py-name" targets="Method pyspark.conf.SparkConf.setMaster()=pyspark.conf.SparkConf-class.html#setMaster"><a title="pyspark.conf.SparkConf.setMaster" class="py-name" href="#" onclick="return doclink('link-12', 'setMaster', 'link-12');">setMaster</a></tt><tt class="py-op">(</tt><tt id="link-13" class="py-name"><a title="pyspark.accumulators.Accumulator.value" class="py-name" href="#" onclick="return doclink('link-13', 'value', 'link-11');">value</a></tt><tt class="py-op">)</tt> </tt>
<a name="L100"></a><tt class="py-lineno">100</tt>  <tt class="py-line">        <tt class="py-keyword">return</tt> <tt class="py-name">self</tt> </tt>
</div><a name="L101"></a><tt class="py-lineno">101</tt>  <tt class="py-line"> </tt>
<a name="SparkConf.setAppName"></a><div id="SparkConf.setAppName-def"><a name="L102"></a><tt class="py-lineno">102</tt> <a class="py-toggle" href="#" id="SparkConf.setAppName-toggle" onclick="return toggle('SparkConf.setAppName');">-</a><tt class="py-line">    <tt class="py-keyword">def</tt> <a class="py-def-name" href="pyspark.conf.SparkConf-class.html#setAppName">setAppName</a><tt class="py-op">(</tt><tt class="py-param">self</tt><tt class="py-op">,</tt> <tt class="py-param">value</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
</div><div id="SparkConf.setAppName-collapsed" style="display:none;" pad="+++" indent="++++++++"></div><div id="SparkConf.setAppName-expanded"><a name="L103"></a><tt class="py-lineno">103</tt>  <tt class="py-line">        <tt class="py-docstring">"""Set application name."""</tt> </tt>
<a name="L104"></a><tt class="py-lineno">104</tt>  <tt class="py-line">        <tt class="py-name">self</tt><tt class="py-op">.</tt><tt class="py-name">_jconf</tt><tt class="py-op">.</tt><tt id="link-14" class="py-name" targets="Method pyspark.conf.SparkConf.setAppName()=pyspark.conf.SparkConf-class.html#setAppName"><a title="pyspark.conf.SparkConf.setAppName" class="py-name" href="#" onclick="return doclink('link-14', 'setAppName', 'link-14');">setAppName</a></tt><tt class="py-op">(</tt><tt id="link-15" class="py-name"><a title="pyspark.accumulators.Accumulator.value" class="py-name" href="#" onclick="return doclink('link-15', 'value', 'link-11');">value</a></tt><tt class="py-op">)</tt> </tt>
<a name="L105"></a><tt class="py-lineno">105</tt>  <tt class="py-line">        <tt class="py-keyword">return</tt> <tt class="py-name">self</tt> </tt>
</div><a name="L106"></a><tt class="py-lineno">106</tt>  <tt class="py-line"> </tt>
<a name="SparkConf.setSparkHome"></a><div id="SparkConf.setSparkHome-def"><a name="L107"></a><tt class="py-lineno">107</tt> <a class="py-toggle" href="#" id="SparkConf.setSparkHome-toggle" onclick="return toggle('SparkConf.setSparkHome');">-</a><tt class="py-line">    <tt class="py-keyword">def</tt> <a class="py-def-name" href="pyspark.conf.SparkConf-class.html#setSparkHome">setSparkHome</a><tt class="py-op">(</tt><tt class="py-param">self</tt><tt class="py-op">,</tt> <tt class="py-param">value</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
</div><div id="SparkConf.setSparkHome-collapsed" style="display:none;" pad="+++" indent="++++++++"></div><div id="SparkConf.setSparkHome-expanded"><a name="L108"></a><tt class="py-lineno">108</tt>  <tt class="py-line">        <tt class="py-docstring">"""Set path where Spark is installed on worker nodes."""</tt> </tt>
<a name="L109"></a><tt class="py-lineno">109</tt>  <tt class="py-line">        <tt class="py-name">self</tt><tt class="py-op">.</tt><tt class="py-name">_jconf</tt><tt class="py-op">.</tt><tt id="link-16" class="py-name" targets="Method pyspark.conf.SparkConf.setSparkHome()=pyspark.conf.SparkConf-class.html#setSparkHome"><a title="pyspark.conf.SparkConf.setSparkHome" class="py-name" href="#" onclick="return doclink('link-16', 'setSparkHome', 'link-16');">setSparkHome</a></tt><tt class="py-op">(</tt><tt id="link-17" class="py-name"><a title="pyspark.accumulators.Accumulator.value" class="py-name" href="#" onclick="return doclink('link-17', 'value', 'link-11');">value</a></tt><tt class="py-op">)</tt> </tt>
<a name="L110"></a><tt class="py-lineno">110</tt>  <tt class="py-line">        <tt class="py-keyword">return</tt> <tt class="py-name">self</tt> </tt>
</div><a name="L111"></a><tt class="py-lineno">111</tt>  <tt class="py-line"> </tt>
<a name="SparkConf.setExecutorEnv"></a><div id="SparkConf.setExecutorEnv-def"><a name="L112"></a><tt class="py-lineno">112</tt> <a class="py-toggle" href="#" id="SparkConf.setExecutorEnv-toggle" onclick="return toggle('SparkConf.setExecutorEnv');">-</a><tt class="py-line">    <tt class="py-keyword">def</tt> <a class="py-def-name" href="pyspark.conf.SparkConf-class.html#setExecutorEnv">setExecutorEnv</a><tt class="py-op">(</tt><tt class="py-param">self</tt><tt class="py-op">,</tt> <tt class="py-param">key</tt><tt class="py-op">=</tt><tt class="py-name">None</tt><tt class="py-op">,</tt> <tt class="py-param">value</tt><tt class="py-op">=</tt><tt class="py-name">None</tt><tt class="py-op">,</tt> <tt class="py-param">pairs</tt><tt class="py-op">=</tt><tt class="py-name">None</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
</div><div id="SparkConf.setExecutorEnv-collapsed" style="display:none;" pad="+++" indent="++++++++"></div><div id="SparkConf.setExecutorEnv-expanded"><a name="L113"></a><tt class="py-lineno">113</tt>  <tt class="py-line">        <tt class="py-docstring">"""Set an environment variable to be passed to executors."""</tt> </tt>
<a name="L114"></a><tt class="py-lineno">114</tt>  <tt class="py-line">        <tt class="py-keyword">if</tt> <tt class="py-op">(</tt><tt class="py-name">key</tt> <tt class="py-op">!=</tt> <tt class="py-name">None</tt> <tt class="py-keyword">and</tt> <tt class="py-name">pairs</tt> <tt class="py-op">!=</tt> <tt class="py-name">None</tt><tt class="py-op">)</tt> <tt class="py-keyword">or</tt> <tt class="py-op">(</tt><tt class="py-name">key</tt> <tt class="py-op">==</tt> <tt class="py-name">None</tt> <tt class="py-keyword">and</tt> <tt class="py-name">pairs</tt> <tt class="py-op">==</tt> <tt class="py-name">None</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
<a name="L115"></a><tt class="py-lineno">115</tt>  <tt class="py-line">            <tt class="py-keyword">raise</tt> <tt class="py-name">Exception</tt><tt class="py-op">(</tt><tt class="py-string">"Either pass one key-value pair or a list of pairs"</tt><tt class="py-op">)</tt> </tt>
<a name="L116"></a><tt class="py-lineno">116</tt>  <tt class="py-line">        <tt class="py-keyword">elif</tt> <tt class="py-name">key</tt> <tt class="py-op">!=</tt> <tt class="py-name">None</tt><tt class="py-op">:</tt> </tt>
<a name="L117"></a><tt class="py-lineno">117</tt>  <tt class="py-line">            <tt class="py-name">self</tt><tt class="py-op">.</tt><tt class="py-name">_jconf</tt><tt class="py-op">.</tt><tt id="link-18" class="py-name" targets="Method pyspark.conf.SparkConf.setExecutorEnv()=pyspark.conf.SparkConf-class.html#setExecutorEnv"><a title="pyspark.conf.SparkConf.setExecutorEnv" class="py-name" href="#" onclick="return doclink('link-18', 'setExecutorEnv', 'link-18');">setExecutorEnv</a></tt><tt class="py-op">(</tt><tt class="py-name">key</tt><tt class="py-op">,</tt> <tt id="link-19" class="py-name"><a title="pyspark.accumulators.Accumulator.value" class="py-name" href="#" onclick="return doclink('link-19', 'value', 'link-11');">value</a></tt><tt class="py-op">)</tt> </tt>
<a name="L118"></a><tt class="py-lineno">118</tt>  <tt class="py-line">        <tt class="py-keyword">elif</tt> <tt class="py-name">pairs</tt> <tt class="py-op">!=</tt> <tt class="py-name">None</tt><tt class="py-op">:</tt> </tt>
<a name="L119"></a><tt class="py-lineno">119</tt>  <tt class="py-line">            <tt class="py-keyword">for</tt> <tt class="py-op">(</tt><tt class="py-name">k</tt><tt class="py-op">,</tt> <tt class="py-name">v</tt><tt class="py-op">)</tt> <tt class="py-keyword">in</tt> <tt class="py-name">pairs</tt><tt class="py-op">:</tt> </tt>
<a name="L120"></a><tt class="py-lineno">120</tt>  <tt class="py-line">                <tt class="py-name">self</tt><tt class="py-op">.</tt><tt class="py-name">_jconf</tt><tt class="py-op">.</tt><tt id="link-20" class="py-name"><a title="pyspark.conf.SparkConf.setExecutorEnv" class="py-name" href="#" onclick="return doclink('link-20', 'setExecutorEnv', 'link-18');">setExecutorEnv</a></tt><tt class="py-op">(</tt><tt class="py-name">k</tt><tt class="py-op">,</tt> <tt class="py-name">v</tt><tt class="py-op">)</tt> </tt>
<a name="L121"></a><tt class="py-lineno">121</tt>  <tt class="py-line">        <tt class="py-keyword">return</tt> <tt class="py-name">self</tt> </tt>
</div><a name="L122"></a><tt class="py-lineno">122</tt>  <tt class="py-line"> </tt>
<a name="SparkConf.setAll"></a><div id="SparkConf.setAll-def"><a name="L123"></a><tt class="py-lineno">123</tt> <a class="py-toggle" href="#" id="SparkConf.setAll-toggle" onclick="return toggle('SparkConf.setAll');">-</a><tt class="py-line">    <tt class="py-keyword">def</tt> <a class="py-def-name" href="pyspark.conf.SparkConf-class.html#setAll">setAll</a><tt class="py-op">(</tt><tt class="py-param">self</tt><tt class="py-op">,</tt> <tt class="py-param">pairs</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
</div><div id="SparkConf.setAll-collapsed" style="display:none;" pad="+++" indent="++++++++"></div><div id="SparkConf.setAll-expanded"><a name="L124"></a><tt class="py-lineno">124</tt>  <tt class="py-line">        <tt class="py-docstring">"""</tt> </tt>
<a name="L125"></a><tt class="py-lineno">125</tt>  <tt class="py-line"><tt class="py-docstring">        Set multiple parameters, passed as a list of key-value pairs.</tt> </tt>
<a name="L126"></a><tt class="py-lineno">126</tt>  <tt class="py-line"><tt class="py-docstring"></tt> </tt>
<a name="L127"></a><tt class="py-lineno">127</tt>  <tt class="py-line"><tt class="py-docstring">        @param pairs: list of key-value pairs to set</tt> </tt>
<a name="L128"></a><tt class="py-lineno">128</tt>  <tt class="py-line"><tt class="py-docstring">        """</tt> </tt>
<a name="L129"></a><tt class="py-lineno">129</tt>  <tt class="py-line">        <tt class="py-keyword">for</tt> <tt class="py-op">(</tt><tt class="py-name">k</tt><tt class="py-op">,</tt> <tt class="py-name">v</tt><tt class="py-op">)</tt> <tt class="py-keyword">in</tt> <tt class="py-name">pairs</tt><tt class="py-op">:</tt> </tt>
<a name="L130"></a><tt class="py-lineno">130</tt>  <tt class="py-line">            <tt class="py-name">self</tt><tt class="py-op">.</tt><tt class="py-name">_jconf</tt><tt class="py-op">.</tt><tt id="link-21" class="py-name"><a title="pyspark.conf.SparkConf.set" class="py-name" href="#" onclick="return doclink('link-21', 'set', 'link-10');">set</a></tt><tt class="py-op">(</tt><tt class="py-name">k</tt><tt class="py-op">,</tt> <tt class="py-name">v</tt><tt class="py-op">)</tt> </tt>
<a name="L131"></a><tt class="py-lineno">131</tt>  <tt class="py-line">        <tt class="py-keyword">return</tt> <tt class="py-name">self</tt> </tt>
</div><a name="L132"></a><tt class="py-lineno">132</tt>  <tt class="py-line"> </tt>
<a name="SparkConf.get"></a><div id="SparkConf.get-def"><a name="L133"></a><tt class="py-lineno">133</tt> <a class="py-toggle" href="#" id="SparkConf.get-toggle" onclick="return toggle('SparkConf.get');">-</a><tt class="py-line">    <tt class="py-keyword">def</tt> <a class="py-def-name" href="pyspark.conf.SparkConf-class.html#get">get</a><tt class="py-op">(</tt><tt class="py-param">self</tt><tt class="py-op">,</tt> <tt class="py-param">key</tt><tt class="py-op">,</tt> <tt class="py-param">defaultValue</tt><tt class="py-op">=</tt><tt class="py-name">None</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
</div><div id="SparkConf.get-collapsed" style="display:none;" pad="+++" indent="++++++++"></div><div id="SparkConf.get-expanded"><a name="L134"></a><tt class="py-lineno">134</tt>  <tt class="py-line">        <tt class="py-docstring">"""Get the configured value for some key, or return a default otherwise."""</tt> </tt>
<a name="L135"></a><tt class="py-lineno">135</tt>  <tt class="py-line">        <tt class="py-keyword">if</tt> <tt class="py-name">defaultValue</tt> <tt class="py-op">==</tt> <tt class="py-name">None</tt><tt class="py-op">:</tt>   <tt class="py-comment"># Py4J doesn't call the right get() if we pass None</tt> </tt>
<a name="L136"></a><tt class="py-lineno">136</tt>  <tt class="py-line">            <tt class="py-keyword">if</tt> <tt class="py-keyword">not</tt> <tt class="py-name">self</tt><tt class="py-op">.</tt><tt class="py-name">_jconf</tt><tt class="py-op">.</tt><tt id="link-22" class="py-name" targets="Method pyspark.conf.SparkConf.contains()=pyspark.conf.SparkConf-class.html#contains"><a title="pyspark.conf.SparkConf.contains" class="py-name" href="#" onclick="return doclink('link-22', 'contains', 'link-22');">contains</a></tt><tt class="py-op">(</tt><tt class="py-name">key</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
<a name="L137"></a><tt class="py-lineno">137</tt>  <tt class="py-line">                <tt class="py-keyword">return</tt> <tt class="py-name">None</tt> </tt>
<a name="L138"></a><tt class="py-lineno">138</tt>  <tt class="py-line">            <tt class="py-keyword">return</tt> <tt class="py-name">self</tt><tt class="py-op">.</tt><tt class="py-name">_jconf</tt><tt class="py-op">.</tt><tt id="link-23" class="py-name" targets="Method pyspark.conf.SparkConf.get()=pyspark.conf.SparkConf-class.html#get,Class Method pyspark.files.SparkFiles.get()=pyspark.files.SparkFiles-class.html#get"><a title="pyspark.conf.SparkConf.get
pyspark.files.SparkFiles.get" class="py-name" href="#" onclick="return doclink('link-23', 'get', 'link-23');">get</a></tt><tt class="py-op">(</tt><tt class="py-name">key</tt><tt class="py-op">)</tt> </tt>
<a name="L139"></a><tt class="py-lineno">139</tt>  <tt class="py-line">        <tt class="py-keyword">else</tt><tt class="py-op">:</tt> </tt>
<a name="L140"></a><tt class="py-lineno">140</tt>  <tt class="py-line">            <tt class="py-keyword">return</tt> <tt class="py-name">self</tt><tt class="py-op">.</tt><tt class="py-name">_jconf</tt><tt class="py-op">.</tt><tt id="link-24" class="py-name"><a title="pyspark.conf.SparkConf.get
pyspark.files.SparkFiles.get" class="py-name" href="#" onclick="return doclink('link-24', 'get', 'link-23');">get</a></tt><tt class="py-op">(</tt><tt class="py-name">key</tt><tt class="py-op">,</tt> <tt class="py-name">defaultValue</tt><tt class="py-op">)</tt> </tt>
</div><a name="L141"></a><tt class="py-lineno">141</tt>  <tt class="py-line"> </tt>
<a name="SparkConf.getAll"></a><div id="SparkConf.getAll-def"><a name="L142"></a><tt class="py-lineno">142</tt> <a class="py-toggle" href="#" id="SparkConf.getAll-toggle" onclick="return toggle('SparkConf.getAll');">-</a><tt class="py-line">    <tt class="py-keyword">def</tt> <a class="py-def-name" href="pyspark.conf.SparkConf-class.html#getAll">getAll</a><tt class="py-op">(</tt><tt class="py-param">self</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
</div><div id="SparkConf.getAll-collapsed" style="display:none;" pad="+++" indent="++++++++"></div><div id="SparkConf.getAll-expanded"><a name="L143"></a><tt class="py-lineno">143</tt>  <tt class="py-line">        <tt class="py-docstring">"""Get all values as a list of key-value pairs."""</tt> </tt>
<a name="L144"></a><tt class="py-lineno">144</tt>  <tt class="py-line">        <tt class="py-name">pairs</tt> <tt class="py-op">=</tt> <tt class="py-op">[</tt><tt class="py-op">]</tt> </tt>
<a name="L145"></a><tt class="py-lineno">145</tt>  <tt class="py-line">        <tt class="py-keyword">for</tt> <tt class="py-name">elem</tt> <tt class="py-keyword">in</tt> <tt class="py-name">self</tt><tt class="py-op">.</tt><tt class="py-name">_jconf</tt><tt class="py-op">.</tt><tt id="link-25" class="py-name" targets="Method pyspark.conf.SparkConf.getAll()=pyspark.conf.SparkConf-class.html#getAll"><a title="pyspark.conf.SparkConf.getAll" class="py-name" href="#" onclick="return doclink('link-25', 'getAll', 'link-25');">getAll</a></tt><tt class="py-op">(</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
<a name="L146"></a><tt class="py-lineno">146</tt>  <tt class="py-line">            <tt class="py-name">pairs</tt><tt class="py-op">.</tt><tt class="py-name">append</tt><tt class="py-op">(</tt><tt class="py-op">(</tt><tt class="py-name">elem</tt><tt class="py-op">.</tt><tt class="py-name">_1</tt><tt class="py-op">(</tt><tt class="py-op">)</tt><tt class="py-op">,</tt> <tt class="py-name">elem</tt><tt class="py-op">.</tt><tt class="py-name">_2</tt><tt class="py-op">(</tt><tt class="py-op">)</tt><tt class="py-op">)</tt><tt class="py-op">)</tt> </tt>
<a name="L147"></a><tt class="py-lineno">147</tt>  <tt class="py-line">        <tt class="py-keyword">return</tt> <tt class="py-name">pairs</tt> </tt>
</div><a name="L148"></a><tt class="py-lineno">148</tt>  <tt class="py-line"> </tt>
<a name="SparkConf.contains"></a><div id="SparkConf.contains-def"><a name="L149"></a><tt class="py-lineno">149</tt> <a class="py-toggle" href="#" id="SparkConf.contains-toggle" onclick="return toggle('SparkConf.contains');">-</a><tt class="py-line">    <tt class="py-keyword">def</tt> <a class="py-def-name" href="pyspark.conf.SparkConf-class.html#contains">contains</a><tt class="py-op">(</tt><tt class="py-param">self</tt><tt class="py-op">,</tt> <tt class="py-param">key</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
</div><div id="SparkConf.contains-collapsed" style="display:none;" pad="+++" indent="++++++++"></div><div id="SparkConf.contains-expanded"><a name="L150"></a><tt class="py-lineno">150</tt>  <tt class="py-line">        <tt class="py-docstring">"""Does this configuration contain a given key?"""</tt> </tt>
<a name="L151"></a><tt class="py-lineno">151</tt>  <tt class="py-line">        <tt class="py-keyword">return</tt> <tt class="py-name">self</tt><tt class="py-op">.</tt><tt class="py-name">_jconf</tt><tt class="py-op">.</tt><tt id="link-26" class="py-name"><a title="pyspark.conf.SparkConf.contains" class="py-name" href="#" onclick="return doclink('link-26', 'contains', 'link-22');">contains</a></tt><tt class="py-op">(</tt><tt class="py-name">key</tt><tt class="py-op">)</tt> </tt>
</div><a name="L152"></a><tt class="py-lineno">152</tt>  <tt class="py-line"> </tt>
<a name="SparkConf.toDebugString"></a><div id="SparkConf.toDebugString-def"><a name="L153"></a><tt class="py-lineno">153</tt> <a class="py-toggle" href="#" id="SparkConf.toDebugString-toggle" onclick="return toggle('SparkConf.toDebugString');">-</a><tt class="py-line">    <tt class="py-keyword">def</tt> <a class="py-def-name" href="pyspark.conf.SparkConf-class.html#toDebugString">toDebugString</a><tt class="py-op">(</tt><tt class="py-param">self</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
</div><div id="SparkConf.toDebugString-collapsed" style="display:none;" pad="+++" indent="++++++++"></div><div id="SparkConf.toDebugString-expanded"><a name="L154"></a><tt class="py-lineno">154</tt>  <tt class="py-line">        <tt class="py-docstring">"""</tt> </tt>
<a name="L155"></a><tt class="py-lineno">155</tt>  <tt class="py-line"><tt class="py-docstring">        Returns a printable version of the configuration, as a list of</tt> </tt>
<a name="L156"></a><tt class="py-lineno">156</tt>  <tt class="py-line"><tt class="py-docstring">        key=value pairs, one per line.</tt> </tt>
<a name="L157"></a><tt class="py-lineno">157</tt>  <tt class="py-line"><tt class="py-docstring">        """</tt> </tt>
<a name="L158"></a><tt class="py-lineno">158</tt>  <tt class="py-line">        <tt class="py-keyword">return</tt> <tt class="py-name">self</tt><tt class="py-op">.</tt><tt class="py-name">_jconf</tt><tt class="py-op">.</tt><tt id="link-27" class="py-name" targets="Method pyspark.conf.SparkConf.toDebugString()=pyspark.conf.SparkConf-class.html#toDebugString"><a title="pyspark.conf.SparkConf.toDebugString" class="py-name" href="#" onclick="return doclink('link-27', 'toDebugString', 'link-27');">toDebugString</a></tt><tt class="py-op">(</tt><tt class="py-op">)</tt> </tt>
</div></div><a name="L159"></a><tt class="py-lineno">159</tt>  <tt class="py-line"> </tt>
<a name="L160"></a><tt class="py-lineno">160</tt>  <tt class="py-line"> </tt>
<a name="_test"></a><div id="_test-def"><a name="L161"></a><tt class="py-lineno">161</tt> <a class="py-toggle" href="#" id="_test-toggle" onclick="return toggle('_test');">-</a><tt class="py-line"><tt class="py-keyword">def</tt> <a class="py-def-name" href="pyspark.conf-module.html#_test">_test</a><tt class="py-op">(</tt><tt class="py-op">)</tt><tt class="py-op">:</tt> </tt>
</div><div id="_test-collapsed" style="display:none;" pad="+++" indent="++++"></div><div id="_test-expanded"><a name="L162"></a><tt class="py-lineno">162</tt>  <tt class="py-line">    <tt class="py-keyword">import</tt> <tt class="py-name">doctest</tt> </tt>
<a name="L163"></a><tt class="py-lineno">163</tt>  <tt class="py-line">    <tt class="py-op">(</tt><tt class="py-name">failure_count</tt><tt class="py-op">,</tt> <tt class="py-name">test_count</tt><tt class="py-op">)</tt> <tt class="py-op">=</tt> <tt class="py-name">doctest</tt><tt class="py-op">.</tt><tt class="py-name">testmod</tt><tt class="py-op">(</tt><tt class="py-name">optionflags</tt><tt class="py-op">=</tt><tt class="py-name">doctest</tt><tt class="py-op">.</tt><tt class="py-name">ELLIPSIS</tt><tt class="py-op">)</tt> </tt>
<a name="L164"></a><tt class="py-lineno">164</tt>  <tt class="py-line">    <tt class="py-keyword">if</tt> <tt class="py-name">failure_count</tt><tt class="py-op">:</tt> </tt>
<a name="L165"></a><tt class="py-lineno">165</tt>  <tt class="py-line">        <tt class="py-name">exit</tt><tt class="py-op">(</tt><tt class="py-op">-</tt><tt class="py-number">1</tt><tt class="py-op">)</tt> </tt>
</div><a name="L166"></a><tt class="py-lineno">166</tt>  <tt class="py-line"> </tt>
<a name="L167"></a><tt class="py-lineno">167</tt>  <tt class="py-line"> </tt>
<a name="L168"></a><tt class="py-lineno">168</tt>  <tt class="py-line"><tt class="py-keyword">if</tt> <tt class="py-name">__name__</tt> <tt class="py-op">==</tt> <tt class="py-string">"__main__"</tt><tt class="py-op">:</tt> </tt>
<a name="L169"></a><tt class="py-lineno">169</tt>  <tt class="py-line">    <tt class="py-name">_test</tt><tt class="py-op">(</tt><tt class="py-op">)</tt> </tt>
<a name="L170"></a><tt class="py-lineno">170</tt>  <tt class="py-line"> </tt><script type="text/javascript">
<!--
expandto(location.href);
// -->
</script>
</pre>
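
The module's doctest block above exercises the individual setters one by one. As a brief illustrative sketch (not part of the module source), the same 0.9.0 API can also be driven through setAll(), a defaulted get(), and contains() before the configuration is handed to a SparkContext; the master URL, application name, and the spark.cores.max lookup below are placeholder values chosen for the example.

from pyspark.conf import SparkConf
from pyspark.context import SparkContext

# setAll() applies every pair and returns the same SparkConf, so further
# setters can still be chained onto the result.
conf = SparkConf().setAll([
    ("spark.master", "local[2]"),      # placeholder master URL
    ("spark.app.name", "Conf demo"),   # placeholder application name
])

# get() returns the supplied default when the key is unset, and contains()
# reports whether a key has been set at all.
cores = conf.get("spark.cores.max", "1")   # "1" unless the key was set
has_home = conf.contains("spark.home")     # False unless spark.home was provided

# Once the conf is passed to Spark it is cloned and can no longer be modified.
sc = SparkContext(conf=conf)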