summaryrefslogtreecommitdiff
path: root/site/docs/1.0.1/api/python/class-tree.html
blob: 9c44ea8be2a17f14c590b025eb30bff7e8a55264 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
<?xml version="1.0" encoding="ascii"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
          "DTD/xhtml1-transitional.dtd">
<!-- Epydoc-generated class-hierarchy page for the PySpark 1.0.1 API docs.
     Markup style (bgcolor, nested layout tables, XHTML void-tag syntax) is
     Epydoc 3.0.1 output and is intentionally left as generated so it keeps
     matching epydoc.css / epydoc.js. -->
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
  <title>Class Hierarchy</title>
  <link rel="stylesheet" href="epydoc.css" type="text/css" />
  <script type="text/javascript" src="epydoc.js"></script>
</head>

<body bgcolor="white" text="black" link="blue" vlink="#204080"
      alink="#204080">
<!-- ==================== NAVIGATION BAR ==================== -->
<table class="navbar" border="0" width="100%" cellpadding="0"
       bgcolor="#a0c0ff" cellspacing="0">
  <tr valign="middle">
  <!-- Home link -->
      <th>&nbsp;&nbsp;&nbsp;<a
        href="pyspark-module.html">Home</a>&nbsp;&nbsp;&nbsp;</th>

  <!-- Tree link -->
      <th bgcolor="#70b0f0" class="navbar-select"
          >&nbsp;&nbsp;&nbsp;Trees&nbsp;&nbsp;&nbsp;</th>

  <!-- Index link -->
      <th>&nbsp;&nbsp;&nbsp;<a
        href="identifier-index.html">Indices</a>&nbsp;&nbsp;&nbsp;</th>

  <!-- Help link -->
      <th>&nbsp;&nbsp;&nbsp;<a
        href="help.html">Help</a>&nbsp;&nbsp;&nbsp;</th>

  <!-- Project homepage -->
      <th class="navbar" align="right" width="100%">
        <table border="0" cellpadding="0" cellspacing="0">
          <tr><th class="navbar" align="center"
            ><!-- Version label fixed to match the 1.0.1 docs directory this
                  page is published under. -->
            <a class="navbar" target="_top" href="http://spark.apache.org">Spark 1.0.1 Python API Docs</a></th>
          </tr></table></th>
  </tr>
</table>
<table width="100%" cellpadding="0" cellspacing="0">
  <tr valign="top">
    <td width="100%">&nbsp;</td>
    <td>
      <table cellpadding="0" cellspacing="0">
        <!-- hide/show private -->
        <tr><td align="right"><span class="options"
            >[<a href="frames.html" target="_top">frames</a
            >]&nbsp;|&nbsp;<a href="class-tree.html"
            target="_top">no&nbsp;frames</a>]</span></td></tr>
      </table>
    </td>
  </tr>
</table>
<center><b>
 [ <a href="module-tree.html">Module Hierarchy</a>
 | <a href="class-tree.html">Class Hierarchy</a> ]
</b></center><br />
<h1 class="epydoc">Class Hierarchy</h1>
<ul class="nomargin-top">
    <li> <strong class="uidlink">collections.Iterable</strong>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.util.MLUtils-class.html">pyspark.mllib.util.MLUtils</a></strong>:
      <em class="summary">Helper methods to load, save and pre-process data used in MLlib.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.sql.SQLContext-class.html">pyspark.sql.SQLContext</a></strong>:
      <em class="summary">Main entry point for SparkSQL functionality.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.storagelevel.StorageLevel-class.html">pyspark.storagelevel.StorageLevel</a></strong>:
      <em class="summary">Flags for controlling the storage of an RDD.</em>
    </li>
    <li> <strong class="uidlink">object</strong>:
      <em class="summary">The most base type</em>
    <ul>
    <li> <strong class="uidlink"><a href="pyspark.mllib.recommendation.ALS-class.html">pyspark.mllib.recommendation.ALS</a></strong>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.accumulators.Accumulator-class.html">pyspark.accumulators.Accumulator</a></strong>:
      <em class="summary">A shared variable that can be accumulated, i.e., has a commutative 
        and associative &quot;add&quot; operation.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.accumulators.AccumulatorParam-class.html">pyspark.accumulators.AccumulatorParam</a></strong>:
      <em class="summary">Helper object that defines how to accumulate values of a given 
        type.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.broadcast.Broadcast-class.html">pyspark.broadcast.Broadcast</a></strong>:
      <em class="summary">A broadcast variable created with <a 
        href="pyspark.context.SparkContext-class.html#broadcast" 
        class="link">SparkContext.broadcast()</a>.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.clustering.KMeans-class.html">pyspark.mllib.clustering.KMeans</a></strong>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.clustering.KMeansModel-class.html">pyspark.mllib.clustering.KMeansModel</a></strong>:
      <em class="summary">A clustering model derived from the k-means method.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.regression.LabeledPoint-class.html">pyspark.mllib.regression.LabeledPoint</a></strong>:
      <em class="summary">The features and labels of a data point.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.regression.LassoWithSGD-class.html">pyspark.mllib.regression.LassoWithSGD</a></strong>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.regression.LinearModel-class.html">pyspark.mllib.regression.LinearModel</a></strong>:
      <em class="summary">A linear model that has a vector of coefficients and an intercept.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.regression.LinearRegressionWithSGD-class.html">pyspark.mllib.regression.LinearRegressionWithSGD</a></strong>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.classification.LogisticRegressionWithSGD-class.html">pyspark.mllib.classification.LogisticRegressionWithSGD</a></strong>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.recommendation.MatrixFactorizationModel-class.html">pyspark.mllib.recommendation.MatrixFactorizationModel</a></strong>:
      <em class="summary">A matrix factorisation model trained by regularized alternating 
        least-squares.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.classification.NaiveBayes-class.html">pyspark.mllib.classification.NaiveBayes</a></strong>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.classification.NaiveBayesModel-class.html">pyspark.mllib.classification.NaiveBayesModel</a></strong>:
      <em class="summary">Model for Naive Bayes classifiers.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.rdd.RDD-class.html">pyspark.rdd.RDD</a></strong>:
      <em class="summary">A Resilient Distributed Dataset (RDD), the basic abstraction in 
        Spark.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.regression.RidgeRegressionWithSGD-class.html">pyspark.mllib.regression.RidgeRegressionWithSGD</a></strong>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.classification.SVMWithSGD-class.html">pyspark.mllib.classification.SVMWithSGD</a></strong>
    </li>
    <li> <strong class="uidlink">pyspark.serializers.Serializer</strong>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.conf.SparkConf-class.html">pyspark.conf.SparkConf</a></strong>:
      <em class="summary">Configuration for a Spark application.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.context.SparkContext-class.html">pyspark.context.SparkContext</a></strong>:
      <em class="summary">Main entry point for Spark functionality.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.files.SparkFiles-class.html">pyspark.files.SparkFiles</a></strong>:
      <em class="summary">Resolves paths to files added through <a 
        href="pyspark.context.SparkContext-class.html#addFile" 
        class="link">SparkContext.addFile()</a>.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.linalg.SparseVector-class.html">pyspark.mllib.linalg.SparseVector</a></strong>:
      <em class="summary">A simple sparse vector class for passing data to MLlib.</em>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.statcounter.StatCounter-class.html">pyspark.statcounter.StatCounter</a></strong>
    </li>
    <li> <strong class="uidlink"><a href="pyspark.mllib.linalg.Vectors-class.html">pyspark.mllib.linalg.Vectors</a></strong>:
      <em class="summary">Factory methods for working with vectors.</em>
    </li>
    <li> <strong class="uidlink">dict</strong>:
      <em class="summary">dict() -&gt; new empty dictionary
dict(mapping) -&gt; new dictionary initialized from a mapping object's
    (key, value) pairs
dict(iterable) -&gt; new dictionary initialized as if via:
    d = {}
    for k, v in iterable:
        d[k] = v
dict(**kwargs) -&gt; new dictionary initialized with the name=value pairs
    in the keyword argument list.</em>
    </li>
    </ul>
    </li>
</ul>
<!-- ==================== NAVIGATION BAR ==================== -->
<table class="navbar" border="0" width="100%" cellpadding="0"
       bgcolor="#a0c0ff" cellspacing="0">
  <tr valign="middle">
  <!-- Home link -->
      <th>&nbsp;&nbsp;&nbsp;<a
        href="pyspark-module.html">Home</a>&nbsp;&nbsp;&nbsp;</th>

  <!-- Tree link -->
      <th bgcolor="#70b0f0" class="navbar-select"
          >&nbsp;&nbsp;&nbsp;Trees&nbsp;&nbsp;&nbsp;</th>

  <!-- Index link -->
      <th>&nbsp;&nbsp;&nbsp;<a
        href="identifier-index.html">Indices</a>&nbsp;&nbsp;&nbsp;</th>

  <!-- Help link -->
      <th>&nbsp;&nbsp;&nbsp;<a
        href="help.html">Help</a>&nbsp;&nbsp;&nbsp;</th>

  <!-- Project homepage -->
      <th class="navbar" align="right" width="100%">
        <table border="0" cellpadding="0" cellspacing="0">
          <tr><th class="navbar" align="center"
            ><a class="navbar" target="_top" href="http://spark.apache.org">Spark 1.0.1 Python API Docs</a></th>
          </tr></table></th>
  </tr>
</table>
<!-- Footer. width was "100%%" in the generated output (a template escape
     that leaked through); corrected to a valid "100%". -->
<table border="0" cellpadding="0" cellspacing="0" width="100%">
  <tr>
    <td align="left" class="footer">
    Generated by Epydoc 3.0.1 on Fri Jul  4 18:52:26 2014
    </td>
    <td align="right" class="footer">
      <a target="mainFrame" href="http://epydoc.sourceforge.net"
        >http://epydoc.sourceforge.net</a>
    </td>
  </tr>
</table>

<script type="text/javascript">
  <!--
  // Private objects are initially displayed (because if
  // javascript is turned off then we want them to be
  // visible); but by default, we want to hide them.  So hide
  // them unless we have a cookie that says to show them.
  checkCookie();
  // -->
</script>
</body>
</html>