summaryrefslogtreecommitdiff
path: root/site/docs/1.5.0/api/python/_modules/pyspark/streaming/context.html
blob: 5b3cea514629efdae50dd9dfe7bc73716ed3a8b9 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">


<html xmlns="http://www.w3.org/1999/xhtml">
  <head>
    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
    
    <title>pyspark.streaming.context &mdash; PySpark 1.5.0 documentation</title>
    
    <link rel="stylesheet" href="../../../_static/nature.css" type="text/css" />
    <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
    
    <script type="text/javascript">
      var DOCUMENTATION_OPTIONS = {
        URL_ROOT:    '../../../',
        VERSION:     '1.5.0',
        COLLAPSE_INDEX: false,
        FILE_SUFFIX: '.html',
        HAS_SOURCE:  true
      };
    </script>
    <script type="text/javascript" src="../../../_static/jquery.js"></script>
    <script type="text/javascript" src="../../../_static/underscore.js"></script>
    <script type="text/javascript" src="../../../_static/doctools.js"></script>
    <link rel="top" title="PySpark 1.5.0 documentation" href="../../../index.html" />
    <link rel="up" title="Module code" href="../../index.html" /> 
  </head>
  <body role="document">
    <div class="related" role="navigation" aria-label="related navigation">
      <h3>Navigation</h3>
      <ul>
        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 1.5.0 documentation</a> &raquo;</li>
          <li class="nav-item nav-item-1"><a href="../../index.html" accesskey="U">Module code</a> &raquo;</li> 
      </ul>
    </div>  

    <div class="document">
      <div class="documentwrapper">
        <div class="bodywrapper">
          <div class="body" role="main">
            
  <h1>Source code for pyspark.streaming.context</h1><div class="highlight"><pre>
<span class="c">#</span>
<span class="c"># Licensed to the Apache Software Foundation (ASF) under one or more</span>
<span class="c"># contributor license agreements.  See the NOTICE file distributed with</span>
<span class="c"># this work for additional information regarding copyright ownership.</span>
<span class="c"># The ASF licenses this file to You under the Apache License, Version 2.0</span>
<span class="c"># (the &quot;License&quot;); you may not use this file except in compliance with</span>
<span class="c"># the License.  You may obtain a copy of the License at</span>
<span class="c">#</span>
<span class="c">#    http://www.apache.org/licenses/LICENSE-2.0</span>
<span class="c">#</span>
<span class="c"># Unless required by applicable law or agreed to in writing, software</span>
<span class="c"># distributed under the License is distributed on an &quot;AS IS&quot; BASIS,</span>
<span class="c"># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</span>
<span class="c"># See the License for the specific language governing permissions and</span>
<span class="c"># limitations under the License.</span>
<span class="c">#</span>

<span class="kn">from</span> <span class="nn">__future__</span> <span class="kn">import</span> <span class="n">print_function</span>

<span class="kn">import</span> <span class="nn">os</span>
<span class="kn">import</span> <span class="nn">sys</span>

<span class="kn">from</span> <span class="nn">py4j.java_gateway</span> <span class="kn">import</span> <span class="n">java_import</span><span class="p">,</span> <span class="n">JavaObject</span>

<span class="kn">from</span> <span class="nn">pyspark</span> <span class="kn">import</span> <span class="n">RDD</span><span class="p">,</span> <span class="n">SparkConf</span>
<span class="kn">from</span> <span class="nn">pyspark.serializers</span> <span class="kn">import</span> <span class="n">NoOpSerializer</span><span class="p">,</span> <span class="n">UTF8Deserializer</span><span class="p">,</span> <span class="n">CloudPickleSerializer</span>
<span class="kn">from</span> <span class="nn">pyspark.context</span> <span class="kn">import</span> <span class="n">SparkContext</span>
<span class="kn">from</span> <span class="nn">pyspark.storagelevel</span> <span class="kn">import</span> <span class="n">StorageLevel</span>
<span class="kn">from</span> <span class="nn">pyspark.streaming.dstream</span> <span class="kn">import</span> <span class="n">DStream</span>
<span class="kn">from</span> <span class="nn">pyspark.streaming.util</span> <span class="kn">import</span> <span class="n">TransformFunction</span><span class="p">,</span> <span class="n">TransformFunctionSerializer</span>

<span class="n">__all__</span> <span class="o">=</span> <span class="p">[</span><span class="s">&quot;StreamingContext&quot;</span><span class="p">]</span>


<span class="k">def</span> <span class="nf">_daemonize_callback_server</span><span class="p">():</span>
    <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">    Hack Py4J to daemonize callback server</span>

<span class="sd">    The thread of callback server has daemon=False, it will block the driver</span>
<span class="sd">    from exiting if it&#39;s not shutdown. The following code replaces `start()`</span>
<span class="sd">    of CallbackServer with a new version, which set daemon=True for this</span>
<span class="sd">    thread.</span>

<span class="sd">    Also, it will update the port number (0) with real port</span>
<span class="sd">    &quot;&quot;&quot;</span>
    <span class="c"># TODO: create a patch for Py4J</span>
    <span class="kn">import</span> <span class="nn">socket</span>
    <span class="kn">import</span> <span class="nn">py4j.java_gateway</span>
    <span class="n">logger</span> <span class="o">=</span> <span class="n">py4j</span><span class="o">.</span><span class="n">java_gateway</span><span class="o">.</span><span class="n">logger</span>
    <span class="kn">from</span> <span class="nn">py4j.java_gateway</span> <span class="kn">import</span> <span class="n">Py4JNetworkError</span>
    <span class="kn">from</span> <span class="nn">threading</span> <span class="kn">import</span> <span class="n">Thread</span>

    <span class="k">def</span> <span class="nf">start</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;Starts the CallbackServer. This method should be called by the</span>
<span class="sd">        client instead of run().&quot;&quot;&quot;</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">server_socket</span> <span class="o">=</span> <span class="n">socket</span><span class="o">.</span><span class="n">socket</span><span class="p">(</span><span class="n">socket</span><span class="o">.</span><span class="n">AF_INET</span><span class="p">,</span> <span class="n">socket</span><span class="o">.</span><span class="n">SOCK_STREAM</span><span class="p">)</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">server_socket</span><span class="o">.</span><span class="n">setsockopt</span><span class="p">(</span><span class="n">socket</span><span class="o">.</span><span class="n">SOL_SOCKET</span><span class="p">,</span> <span class="n">socket</span><span class="o">.</span><span class="n">SO_REUSEADDR</span><span class="p">,</span>
                                      <span class="mi">1</span><span class="p">)</span>
        <span class="k">try</span><span class="p">:</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">server_socket</span><span class="o">.</span><span class="n">bind</span><span class="p">((</span><span class="bp">self</span><span class="o">.</span><span class="n">address</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">port</span><span class="p">))</span>
            <span class="k">if</span> <span class="ow">not</span> <span class="bp">self</span><span class="o">.</span><span class="n">port</span><span class="p">:</span>
                <span class="c"># update port with real port</span>
                <span class="bp">self</span><span class="o">.</span><span class="n">port</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">server_socket</span><span class="o">.</span><span class="n">getsockname</span><span class="p">()[</span><span class="mi">1</span><span class="p">]</span>
        <span class="k">except</span> <span class="ne">Exception</span> <span class="k">as</span> <span class="n">e</span><span class="p">:</span>
            <span class="n">msg</span> <span class="o">=</span> <span class="s">&#39;An error occurred while trying to start the callback server: </span><span class="si">%s</span><span class="s">&#39;</span> <span class="o">%</span> <span class="n">e</span>
            <span class="n">logger</span><span class="o">.</span><span class="n">exception</span><span class="p">(</span><span class="n">msg</span><span class="p">)</span>
            <span class="k">raise</span> <span class="n">Py4JNetworkError</span><span class="p">(</span><span class="n">msg</span><span class="p">)</span>

        <span class="c"># Maybe thread needs to be cleaned up?</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">thread</span> <span class="o">=</span> <span class="n">Thread</span><span class="p">(</span><span class="n">target</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">run</span><span class="p">)</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">thread</span><span class="o">.</span><span class="n">daemon</span> <span class="o">=</span> <span class="bp">True</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">thread</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>

    <span class="n">py4j</span><span class="o">.</span><span class="n">java_gateway</span><span class="o">.</span><span class="n">CallbackServer</span><span class="o">.</span><span class="n">start</span> <span class="o">=</span> <span class="n">start</span>


<div class="viewcode-block" id="StreamingContext"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext">[docs]</a><span class="k">class</span> <span class="nc">StreamingContext</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
    <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">    Main entry point for Spark Streaming functionality. A StreamingContext</span>
<span class="sd">    represents the connection to a Spark cluster, and can be used to create</span>
<span class="sd">    L{DStream}s from various input sources. It can be created from an existing L{SparkContext}.</span>
<span class="sd">    After creating and transforming DStreams, the streaming computation can</span>
<span class="sd">    be started and stopped using `context.start()` and `context.stop()`,</span>
<span class="sd">    respectively. `context.awaitTermination()` allows the current thread</span>
<span class="sd">    to wait for the termination of the context by `stop()` or by an exception.</span>
<span class="sd">    &quot;&quot;&quot;</span>
    <span class="n">_transformerSerializer</span> <span class="o">=</span> <span class="bp">None</span>

    <span class="c"># Reference to a currently active StreamingContext</span>
    <span class="n">_activeContext</span> <span class="o">=</span> <span class="bp">None</span>

    <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">sparkContext</span><span class="p">,</span> <span class="n">batchDuration</span><span class="o">=</span><span class="bp">None</span><span class="p">,</span> <span class="n">jssc</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Create a new StreamingContext.</span>

<span class="sd">        @param sparkContext: L{SparkContext} object.</span>
<span class="sd">        @param batchDuration: the time interval (in seconds) at which streaming</span>
<span class="sd">                              data will be divided into batches</span>
<span class="sd">        &quot;&quot;&quot;</span>

        <span class="bp">self</span><span class="o">.</span><span class="n">_sc</span> <span class="o">=</span> <span class="n">sparkContext</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_jvm</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_sc</span><span class="o">.</span><span class="n">_jvm</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span> <span class="o">=</span> <span class="n">jssc</span> <span class="ow">or</span> <span class="bp">self</span><span class="o">.</span><span class="n">_initialize_context</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_sc</span><span class="p">,</span> <span class="n">batchDuration</span><span class="p">)</span>

    <span class="k">def</span> <span class="nf">_initialize_context</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">sc</span><span class="p">,</span> <span class="n">duration</span><span class="p">):</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_ensure_initialized</span><span class="p">()</span>
        <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_jvm</span><span class="o">.</span><span class="n">JavaStreamingContext</span><span class="p">(</span><span class="n">sc</span><span class="o">.</span><span class="n">_jsc</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_jduration</span><span class="p">(</span><span class="n">duration</span><span class="p">))</span>

    <span class="k">def</span> <span class="nf">_jduration</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">seconds</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Create Duration object given number of seconds</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_jvm</span><span class="o">.</span><span class="n">Duration</span><span class="p">(</span><span class="nb">int</span><span class="p">(</span><span class="n">seconds</span> <span class="o">*</span> <span class="mi">1000</span><span class="p">))</span>

    <span class="nd">@classmethod</span>
    <span class="k">def</span> <span class="nf">_ensure_initialized</span><span class="p">(</span><span class="n">cls</span><span class="p">):</span>
        <span class="n">SparkContext</span><span class="o">.</span><span class="n">_ensure_initialized</span><span class="p">()</span>
        <span class="n">gw</span> <span class="o">=</span> <span class="n">SparkContext</span><span class="o">.</span><span class="n">_gateway</span>

        <span class="n">java_import</span><span class="p">(</span><span class="n">gw</span><span class="o">.</span><span class="n">jvm</span><span class="p">,</span> <span class="s">&quot;org.apache.spark.streaming.*&quot;</span><span class="p">)</span>
        <span class="n">java_import</span><span class="p">(</span><span class="n">gw</span><span class="o">.</span><span class="n">jvm</span><span class="p">,</span> <span class="s">&quot;org.apache.spark.streaming.api.java.*&quot;</span><span class="p">)</span>
        <span class="n">java_import</span><span class="p">(</span><span class="n">gw</span><span class="o">.</span><span class="n">jvm</span><span class="p">,</span> <span class="s">&quot;org.apache.spark.streaming.api.python.*&quot;</span><span class="p">)</span>

        <span class="c"># start callback server</span>
        <span class="c"># getattr will fallback to JVM, so we cannot test by hasattr()</span>
        <span class="k">if</span> <span class="s">&quot;_callback_server&quot;</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">gw</span><span class="o">.</span><span class="n">__dict__</span><span class="p">:</span>
            <span class="n">_daemonize_callback_server</span><span class="p">()</span>
            <span class="c"># use random port</span>
            <span class="n">gw</span><span class="o">.</span><span class="n">_start_callback_server</span><span class="p">(</span><span class="mi">0</span><span class="p">)</span>
            <span class="c"># gateway with real port</span>
            <span class="n">gw</span><span class="o">.</span><span class="n">_python_proxy_port</span> <span class="o">=</span> <span class="n">gw</span><span class="o">.</span><span class="n">_callback_server</span><span class="o">.</span><span class="n">port</span>
            <span class="c"># get the GatewayServer object in JVM by ID</span>
            <span class="n">jgws</span> <span class="o">=</span> <span class="n">JavaObject</span><span class="p">(</span><span class="s">&quot;GATEWAY_SERVER&quot;</span><span class="p">,</span> <span class="n">gw</span><span class="o">.</span><span class="n">_gateway_client</span><span class="p">)</span>
            <span class="c"># update the port of CallbackClient with real port</span>
            <span class="n">gw</span><span class="o">.</span><span class="n">jvm</span><span class="o">.</span><span class="n">PythonDStream</span><span class="o">.</span><span class="n">updatePythonGatewayPort</span><span class="p">(</span><span class="n">jgws</span><span class="p">,</span> <span class="n">gw</span><span class="o">.</span><span class="n">_python_proxy_port</span><span class="p">)</span>

        <span class="c"># register serializer for TransformFunction</span>
        <span class="c"># it happens before creating SparkContext when loading from a checkpoint</span>
        <span class="n">cls</span><span class="o">.</span><span class="n">_transformerSerializer</span> <span class="o">=</span> <span class="n">TransformFunctionSerializer</span><span class="p">(</span>
            <span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span><span class="p">,</span> <span class="n">CloudPickleSerializer</span><span class="p">(),</span> <span class="n">gw</span><span class="p">)</span>

    <span class="nd">@classmethod</span>
<div class="viewcode-block" id="StreamingContext.getOrCreate"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.getOrCreate">[docs]</a>    <span class="k">def</span> <span class="nf">getOrCreate</span><span class="p">(</span><span class="n">cls</span><span class="p">,</span> <span class="n">checkpointPath</span><span class="p">,</span> <span class="n">setupFunc</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Either recreate a StreamingContext from checkpoint data or create a new StreamingContext.</span>
<span class="sd">        If checkpoint data exists in the provided `checkpointPath`, then StreamingContext will be</span>
<span class="sd">        recreated from the checkpoint data. If the data does not exist, then the provided setupFunc</span>
<span class="sd">        will be used to create a new context.</span>

<span class="sd">        @param checkpointPath: Checkpoint directory used in an earlier streaming program</span>
<span class="sd">        @param setupFunc:      Function to create a new context and setup DStreams</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="n">cls</span><span class="o">.</span><span class="n">_ensure_initialized</span><span class="p">()</span>
        <span class="n">gw</span> <span class="o">=</span> <span class="n">SparkContext</span><span class="o">.</span><span class="n">_gateway</span>

        <span class="c"># Check whether valid checkpoint information exists in the given path</span>
        <span class="k">if</span> <span class="n">gw</span><span class="o">.</span><span class="n">jvm</span><span class="o">.</span><span class="n">CheckpointReader</span><span class="o">.</span><span class="n">read</span><span class="p">(</span><span class="n">checkpointPath</span><span class="p">)</span><span class="o">.</span><span class="n">isEmpty</span><span class="p">():</span>
            <span class="n">ssc</span> <span class="o">=</span> <span class="n">setupFunc</span><span class="p">()</span>
            <span class="n">ssc</span><span class="o">.</span><span class="n">checkpoint</span><span class="p">(</span><span class="n">checkpointPath</span><span class="p">)</span>
            <span class="k">return</span> <span class="n">ssc</span>

        <span class="k">try</span><span class="p">:</span>
            <span class="n">jssc</span> <span class="o">=</span> <span class="n">gw</span><span class="o">.</span><span class="n">jvm</span><span class="o">.</span><span class="n">JavaStreamingContext</span><span class="p">(</span><span class="n">checkpointPath</span><span class="p">)</span>
        <span class="k">except</span> <span class="ne">Exception</span><span class="p">:</span>
            <span class="k">print</span><span class="p">(</span><span class="s">&quot;failed to load StreamingContext from checkpoint&quot;</span><span class="p">,</span> <span class="nb">file</span><span class="o">=</span><span class="n">sys</span><span class="o">.</span><span class="n">stderr</span><span class="p">)</span>
            <span class="k">raise</span>

        <span class="c"># If there is already an active instance of Python SparkContext use it, or create a new one</span>
        <span class="k">if</span> <span class="ow">not</span> <span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span><span class="p">:</span>
            <span class="n">jsc</span> <span class="o">=</span> <span class="n">jssc</span><span class="o">.</span><span class="n">sparkContext</span><span class="p">()</span>
            <span class="n">conf</span> <span class="o">=</span> <span class="n">SparkConf</span><span class="p">(</span><span class="n">_jconf</span><span class="o">=</span><span class="n">jsc</span><span class="o">.</span><span class="n">getConf</span><span class="p">())</span>
            <span class="n">SparkContext</span><span class="p">(</span><span class="n">conf</span><span class="o">=</span><span class="n">conf</span><span class="p">,</span> <span class="n">gateway</span><span class="o">=</span><span class="n">gw</span><span class="p">,</span> <span class="n">jsc</span><span class="o">=</span><span class="n">jsc</span><span class="p">)</span>

        <span class="n">sc</span> <span class="o">=</span> <span class="n">SparkContext</span><span class="o">.</span><span class="n">_active_spark_context</span>

        <span class="c"># update ctx in serializer</span>
        <span class="n">cls</span><span class="o">.</span><span class="n">_transformerSerializer</span><span class="o">.</span><span class="n">ctx</span> <span class="o">=</span> <span class="n">sc</span>
        <span class="k">return</span> <span class="n">StreamingContext</span><span class="p">(</span><span class="n">sc</span><span class="p">,</span> <span class="bp">None</span><span class="p">,</span> <span class="n">jssc</span><span class="p">)</span>
</div>
    <span class="nd">@classmethod</span>
<div class="viewcode-block" id="StreamingContext.getActive"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.getActive">[docs]</a>    <span class="k">def</span> <span class="nf">getActive</span><span class="p">(</span><span class="n">cls</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Return either the currently active StreamingContext (i.e., if there is a context started</span>
<span class="sd">        but not stopped) or None.</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="n">activePythonContext</span> <span class="o">=</span> <span class="n">cls</span><span class="o">.</span><span class="n">_activeContext</span>
        <span class="k">if</span> <span class="n">activePythonContext</span> <span class="ow">is</span> <span class="ow">not</span> <span class="bp">None</span><span class="p">:</span>
            <span class="c"># Verify that the current running Java StreamingContext is active and is the same one</span>
            <span class="c"># backing the supposedly active Python context</span>
            <span class="n">activePythonContextJavaId</span> <span class="o">=</span> <span class="n">activePythonContext</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">ssc</span><span class="p">()</span><span class="o">.</span><span class="n">hashCode</span><span class="p">()</span>
            <span class="n">activeJvmContextOption</span> <span class="o">=</span> <span class="n">activePythonContext</span><span class="o">.</span><span class="n">_jvm</span><span class="o">.</span><span class="n">StreamingContext</span><span class="o">.</span><span class="n">getActive</span><span class="p">()</span>

            <span class="k">if</span> <span class="n">activeJvmContextOption</span><span class="o">.</span><span class="n">isEmpty</span><span class="p">():</span>
                <span class="n">cls</span><span class="o">.</span><span class="n">_activeContext</span> <span class="o">=</span> <span class="bp">None</span>
            <span class="k">elif</span> <span class="n">activeJvmContextOption</span><span class="o">.</span><span class="n">get</span><span class="p">()</span><span class="o">.</span><span class="n">hashCode</span><span class="p">()</span> <span class="o">!=</span> <span class="n">activePythonContextJavaId</span><span class="p">:</span>
                <span class="n">cls</span><span class="o">.</span><span class="n">_activeContext</span> <span class="o">=</span> <span class="bp">None</span>
                <span class="k">raise</span> <span class="ne">Exception</span><span class="p">(</span><span class="s">&quot;JVM&#39;s active JavaStreamingContext is not the JavaStreamingContext &quot;</span>
                                <span class="s">&quot;backing the active Python StreamingContext. This is unexpected.&quot;</span><span class="p">)</span>
        <span class="k">return</span> <span class="n">cls</span><span class="o">.</span><span class="n">_activeContext</span>
</div>
    <span class="nd">@classmethod</span>
<div class="viewcode-block" id="StreamingContext.getActiveOrCreate"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.getActiveOrCreate">[docs]</a>    <span class="k">def</span> <span class="nf">getActiveOrCreate</span><span class="p">(</span><span class="n">cls</span><span class="p">,</span> <span class="n">checkpointPath</span><span class="p">,</span> <span class="n">setupFunc</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Either return the active StreamingContext (i.e. currently started but not stopped),</span>
<span class="sd">        or recreate a StreamingContext from checkpoint data or create a new StreamingContext</span>
<span class="sd">        using the provided setupFunc function. If the checkpointPath is None or does not contain</span>
<span class="sd">        valid checkpoint data, then setupFunc will be called to create a new context and setup</span>
<span class="sd">        DStreams.</span>

<span class="sd">        @param checkpointPath: Checkpoint directory used in an earlier streaming program. Can be</span>
<span class="sd">                               None if the intention is to always create a new context when there</span>
<span class="sd">                               is no active context.</span>
<span class="sd">        @param setupFunc:      Function to create a new JavaStreamingContext and setup DStreams</span>
<span class="sd">        &quot;&quot;&quot;</span>

        <span class="k">if</span> <span class="n">setupFunc</span> <span class="ow">is</span> <span class="bp">None</span><span class="p">:</span>
            <span class="k">raise</span> <span class="ne">Exception</span><span class="p">(</span><span class="s">&quot;setupFunc cannot be None&quot;</span><span class="p">)</span>
        <span class="n">activeContext</span> <span class="o">=</span> <span class="n">cls</span><span class="o">.</span><span class="n">getActive</span><span class="p">()</span>
        <span class="k">if</span> <span class="n">activeContext</span> <span class="ow">is</span> <span class="ow">not</span> <span class="bp">None</span><span class="p">:</span>
            <span class="k">return</span> <span class="n">activeContext</span>
        <span class="k">elif</span> <span class="n">checkpointPath</span> <span class="ow">is</span> <span class="ow">not</span> <span class="bp">None</span><span class="p">:</span>
            <span class="k">return</span> <span class="n">cls</span><span class="o">.</span><span class="n">getOrCreate</span><span class="p">(</span><span class="n">checkpointPath</span><span class="p">,</span> <span class="n">setupFunc</span><span class="p">)</span>
        <span class="k">else</span><span class="p">:</span>
            <span class="k">return</span> <span class="n">setupFunc</span><span class="p">()</span>
</div>
    <span class="nd">@property</span>
    <span class="k">def</span> <span class="nf">sparkContext</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Return SparkContext which is associated with this StreamingContext.</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">_sc</span>

<div class="viewcode-block" id="StreamingContext.start"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.start">[docs]</a>    <span class="k">def</span> <span class="nf">start</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Start the execution of the streams.</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">start</span><span class="p">()</span>
        <span class="n">StreamingContext</span><span class="o">.</span><span class="n">_activeContext</span> <span class="o">=</span> <span class="bp">self</span>
</div>
<div class="viewcode-block" id="StreamingContext.awaitTermination"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.awaitTermination">[docs]</a>    <span class="k">def</span> <span class="nf">awaitTermination</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">timeout</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Wait for the execution to stop.</span>
<span class="sd">        @param timeout: time to wait in seconds</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="k">if</span> <span class="n">timeout</span> <span class="ow">is</span> <span class="bp">None</span><span class="p">:</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">awaitTermination</span><span class="p">()</span>
        <span class="k">else</span><span class="p">:</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">awaitTerminationOrTimeout</span><span class="p">(</span><span class="nb">int</span><span class="p">(</span><span class="n">timeout</span> <span class="o">*</span> <span class="mi">1000</span><span class="p">))</span>
</div>
<div class="viewcode-block" id="StreamingContext.awaitTerminationOrTimeout"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.awaitTerminationOrTimeout">[docs]</a>    <span class="k">def</span> <span class="nf">awaitTerminationOrTimeout</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">timeout</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Wait for the execution to stop. Return `true` if it&#39;s stopped; or</span>
<span class="sd">        throw the reported error during the execution; or `false` if the</span>
<span class="sd">        waiting time elapsed before returning from the method.</span>
<span class="sd">        @param timeout: time to wait in seconds</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">awaitTerminationOrTimeout</span><span class="p">(</span><span class="nb">int</span><span class="p">(</span><span class="n">timeout</span> <span class="o">*</span> <span class="mi">1000</span><span class="p">))</span>
</div>
<div class="viewcode-block" id="StreamingContext.stop"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.stop">[docs]</a>    <span class="k">def</span> <span class="nf">stop</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">stopSparkContext</span><span class="o">=</span><span class="bp">True</span><span class="p">,</span> <span class="n">stopGraceFully</span><span class="o">=</span><span class="bp">False</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Stop the execution of the streams, with option of ensuring all</span>
<span class="sd">        received data has been processed.</span>

<span class="sd">        @param stopSparkContext: Stop the associated SparkContext or not</span>
<span class="sd">        @param stopGraceFully: Stop gracefully by waiting for the processing</span>
<span class="sd">                              of all received data to be completed</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">stop</span><span class="p">(</span><span class="n">stopSparkContext</span><span class="p">,</span> <span class="n">stopGraceFully</span><span class="p">)</span>
        <span class="n">StreamingContext</span><span class="o">.</span><span class="n">_activeContext</span> <span class="o">=</span> <span class="bp">None</span>
        <span class="k">if</span> <span class="n">stopSparkContext</span><span class="p">:</span>
            <span class="bp">self</span><span class="o">.</span><span class="n">_sc</span><span class="o">.</span><span class="n">stop</span><span class="p">()</span>
</div>
<div class="viewcode-block" id="StreamingContext.remember"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.remember">[docs]</a>    <span class="k">def</span> <span class="nf">remember</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">duration</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Set each DStream in this context to remember RDDs it generated</span>
<span class="sd">        in the last given duration. DStreams remember RDDs only for a</span>
<span class="sd">        limited duration of time and release them for garbage collection.</span>
<span class="sd">        This method allows the developer to specify how long to remember</span>
<span class="sd">        the RDDs (if the developer wishes to query old data outside the</span>
<span class="sd">        DStream computation).</span>

<span class="sd">        @param duration: Minimum duration (in seconds) that each DStream</span>
<span class="sd">                        should remember its RDDs</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">remember</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_jduration</span><span class="p">(</span><span class="n">duration</span><span class="p">))</span>
</div>
<div class="viewcode-block" id="StreamingContext.checkpoint"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.checkpoint">[docs]</a>    <span class="k">def</span> <span class="nf">checkpoint</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">directory</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Sets the context to periodically checkpoint the DStream operations for master</span>
<span class="sd">        fault-tolerance. The graph will be checkpointed every batch interval.</span>

<span class="sd">        @param directory: HDFS-compatible directory where the checkpoint data</span>
<span class="sd">                         will be reliably stored</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">checkpoint</span><span class="p">(</span><span class="n">directory</span><span class="p">)</span>
</div>
<div class="viewcode-block" id="StreamingContext.socketTextStream"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.socketTextStream">[docs]</a>    <span class="k">def</span> <span class="nf">socketTextStream</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hostname</span><span class="p">,</span> <span class="n">port</span><span class="p">,</span> <span class="n">storageLevel</span><span class="o">=</span><span class="n">StorageLevel</span><span class="o">.</span><span class="n">MEMORY_AND_DISK_SER_2</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Create an input from TCP source hostname:port. Data is received using</span>
<span class="sd">        a TCP socket and the received bytes are interpreted as UTF8 encoded ``\\n`` delimited</span>
<span class="sd">        lines.</span>

<span class="sd">        @param hostname:      Hostname to connect to for receiving data</span>
<span class="sd">        @param port:          Port to connect to for receiving data</span>
<span class="sd">        @param storageLevel:  Storage level to use for storing the received objects</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="n">jlevel</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_sc</span><span class="o">.</span><span class="n">_getJavaStorageLevel</span><span class="p">(</span><span class="n">storageLevel</span><span class="p">)</span>
        <span class="k">return</span> <span class="n">DStream</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">socketTextStream</span><span class="p">(</span><span class="n">hostname</span><span class="p">,</span> <span class="n">port</span><span class="p">,</span> <span class="n">jlevel</span><span class="p">),</span> <span class="bp">self</span><span class="p">,</span>
                       <span class="n">UTF8Deserializer</span><span class="p">())</span>
</div>
<div class="viewcode-block" id="StreamingContext.textFileStream"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.textFileStream">[docs]</a>    <span class="k">def</span> <span class="nf">textFileStream</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">directory</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Create an input stream that monitors a Hadoop-compatible file system</span>
<span class="sd">        for new files and reads them as text files. Files must be written to the</span>
<span class="sd">        monitored directory by &quot;moving&quot; them from another location within the same</span>
<span class="sd">        file system. File names starting with . are ignored.</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="k">return</span> <span class="n">DStream</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">textFileStream</span><span class="p">(</span><span class="n">directory</span><span class="p">),</span> <span class="bp">self</span><span class="p">,</span> <span class="n">UTF8Deserializer</span><span class="p">())</span>
</div>
<div class="viewcode-block" id="StreamingContext.binaryRecordsStream"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.binaryRecordsStream">[docs]</a>    <span class="k">def</span> <span class="nf">binaryRecordsStream</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">directory</span><span class="p">,</span> <span class="n">recordLength</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Create an input stream that monitors a Hadoop-compatible file system</span>
<span class="sd">        for new files and reads them as flat binary files with records of</span>
<span class="sd">        fixed length. Files must be written to the monitored directory by &quot;moving&quot;</span>
<span class="sd">        them from another location within the same file system.</span>
<span class="sd">        File names starting with . are ignored.</span>

<span class="sd">        @param directory:       Directory to load data from</span>
<span class="sd">        @param recordLength:    Length of each record in bytes</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="k">return</span> <span class="n">DStream</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">binaryRecordsStream</span><span class="p">(</span><span class="n">directory</span><span class="p">,</span> <span class="n">recordLength</span><span class="p">),</span> <span class="bp">self</span><span class="p">,</span>
                       <span class="n">NoOpSerializer</span><span class="p">())</span>
</div>
    <span class="k">def</span> <span class="nf">_check_serializers</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">rdds</span><span class="p">):</span>
        <span class="c"># make sure they have same serializer</span>
        <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="nb">set</span><span class="p">(</span><span class="n">rdd</span><span class="o">.</span><span class="n">_jrdd_deserializer</span> <span class="k">for</span> <span class="n">rdd</span> <span class="ow">in</span> <span class="n">rdds</span><span class="p">))</span> <span class="o">&gt;</span> <span class="mi">1</span><span class="p">:</span>
            <span class="k">for</span> <span class="n">i</span> <span class="ow">in</span> <span class="nb">range</span><span class="p">(</span><span class="nb">len</span><span class="p">(</span><span class="n">rdds</span><span class="p">)):</span>
                <span class="c"># reset them to sc.serializer</span>
                <span class="n">rdds</span><span class="p">[</span><span class="n">i</span><span class="p">]</span> <span class="o">=</span> <span class="n">rdds</span><span class="p">[</span><span class="n">i</span><span class="p">]</span><span class="o">.</span><span class="n">_reserialize</span><span class="p">()</span>

<div class="viewcode-block" id="StreamingContext.queueStream"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.queueStream">[docs]</a>    <span class="k">def</span> <span class="nf">queueStream</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">rdds</span><span class="p">,</span> <span class="n">oneAtATime</span><span class="o">=</span><span class="bp">True</span><span class="p">,</span> <span class="n">default</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Create an input stream from a queue of RDDs or list. In each batch,</span>
<span class="sd">        it will process either one or all of the RDDs returned by the queue.</span>

<span class="sd">        NOTE: changes to the queue after the stream is created will not be recognized.</span>

<span class="sd">        @param rdds:       Queue of RDDs</span>
<span class="sd">        @param oneAtATime: pick one rdd each time or pick all of them once.</span>
<span class="sd">        @param default:    The default rdd if no more in rdds</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="k">if</span> <span class="n">default</span> <span class="ow">and</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">default</span><span class="p">,</span> <span class="n">RDD</span><span class="p">):</span>
            <span class="n">default</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_sc</span><span class="o">.</span><span class="n">parallelize</span><span class="p">(</span><span class="n">default</span><span class="p">)</span>

        <span class="k">if</span> <span class="ow">not</span> <span class="n">rdds</span> <span class="ow">and</span> <span class="n">default</span><span class="p">:</span>
            <span class="n">rdds</span> <span class="o">=</span> <span class="p">[</span><span class="n">rdds</span><span class="p">]</span>

        <span class="k">if</span> <span class="n">rdds</span> <span class="ow">and</span> <span class="ow">not</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">rdds</span><span class="p">[</span><span class="mi">0</span><span class="p">],</span> <span class="n">RDD</span><span class="p">):</span>
            <span class="n">rdds</span> <span class="o">=</span> <span class="p">[</span><span class="bp">self</span><span class="o">.</span><span class="n">_sc</span><span class="o">.</span><span class="n">parallelize</span><span class="p">(</span><span class="nb">input</span><span class="p">)</span> <span class="k">for</span> <span class="nb">input</span> <span class="ow">in</span> <span class="n">rdds</span><span class="p">]</span>
        <span class="bp">self</span><span class="o">.</span><span class="n">_check_serializers</span><span class="p">(</span><span class="n">rdds</span><span class="p">)</span>

        <span class="n">queue</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_jvm</span><span class="o">.</span><span class="n">PythonDStream</span><span class="o">.</span><span class="n">toRDDQueue</span><span class="p">([</span><span class="n">r</span><span class="o">.</span><span class="n">_jrdd</span> <span class="k">for</span> <span class="n">r</span> <span class="ow">in</span> <span class="n">rdds</span><span class="p">])</span>
        <span class="k">if</span> <span class="n">default</span><span class="p">:</span>
            <span class="n">default</span> <span class="o">=</span> <span class="n">default</span><span class="o">.</span><span class="n">_reserialize</span><span class="p">(</span><span class="n">rdds</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">_jrdd_deserializer</span><span class="p">)</span>
            <span class="n">jdstream</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">queueStream</span><span class="p">(</span><span class="n">queue</span><span class="p">,</span> <span class="n">oneAtATime</span><span class="p">,</span> <span class="n">default</span><span class="o">.</span><span class="n">_jrdd</span><span class="p">)</span>
        <span class="k">else</span><span class="p">:</span>
            <span class="n">jdstream</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">queueStream</span><span class="p">(</span><span class="n">queue</span><span class="p">,</span> <span class="n">oneAtATime</span><span class="p">)</span>
        <span class="k">return</span> <span class="n">DStream</span><span class="p">(</span><span class="n">jdstream</span><span class="p">,</span> <span class="bp">self</span><span class="p">,</span> <span class="n">rdds</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">_jrdd_deserializer</span><span class="p">)</span>
</div>
<div class="viewcode-block" id="StreamingContext.transform"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.transform">[docs]</a>    <span class="k">def</span> <span class="nf">transform</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">dstreams</span><span class="p">,</span> <span class="n">transformFunc</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Create a new DStream in which each RDD is generated by applying</span>
<span class="sd">        a function on RDDs of the DStreams. The order of the JavaRDDs in</span>
<span class="sd">        the transform function parameter will be the same as the order</span>
<span class="sd">        of corresponding DStreams in the list.</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="n">jdstreams</span> <span class="o">=</span> <span class="p">[</span><span class="n">d</span><span class="o">.</span><span class="n">_jdstream</span> <span class="k">for</span> <span class="n">d</span> <span class="ow">in</span> <span class="n">dstreams</span><span class="p">]</span>
        <span class="c"># change the final serializer to sc.serializer</span>
        <span class="n">func</span> <span class="o">=</span> <span class="n">TransformFunction</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_sc</span><span class="p">,</span>
                                 <span class="k">lambda</span> <span class="n">t</span><span class="p">,</span> <span class="o">*</span><span class="n">rdds</span><span class="p">:</span> <span class="n">transformFunc</span><span class="p">(</span><span class="n">rdds</span><span class="p">)</span><span class="o">.</span><span class="n">map</span><span class="p">(</span><span class="k">lambda</span> <span class="n">x</span><span class="p">:</span> <span class="n">x</span><span class="p">),</span>
                                 <span class="o">*</span><span class="p">[</span><span class="n">d</span><span class="o">.</span><span class="n">_jrdd_deserializer</span> <span class="k">for</span> <span class="n">d</span> <span class="ow">in</span> <span class="n">dstreams</span><span class="p">])</span>
        <span class="n">jfunc</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_jvm</span><span class="o">.</span><span class="n">TransformFunction</span><span class="p">(</span><span class="n">func</span><span class="p">)</span>
        <span class="n">jdstream</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">transform</span><span class="p">(</span><span class="n">jdstreams</span><span class="p">,</span> <span class="n">jfunc</span><span class="p">)</span>
        <span class="k">return</span> <span class="n">DStream</span><span class="p">(</span><span class="n">jdstream</span><span class="p">,</span> <span class="bp">self</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">_sc</span><span class="o">.</span><span class="n">serializer</span><span class="p">)</span>
</div>
<div class="viewcode-block" id="StreamingContext.union"><a class="viewcode-back" href="../../../pyspark.streaming.html#pyspark.streaming.StreamingContext.union">[docs]</a>    <span class="k">def</span> <span class="nf">union</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="o">*</span><span class="n">dstreams</span><span class="p">):</span>
        <span class="sd">&quot;&quot;&quot;</span>
<span class="sd">        Create a unified DStream from multiple DStreams of the same</span>
<span class="sd">        type and same slide duration.</span>
<span class="sd">        &quot;&quot;&quot;</span>
        <span class="k">if</span> <span class="ow">not</span> <span class="n">dstreams</span><span class="p">:</span>
            <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s">&quot;should have at least one DStream to union&quot;</span><span class="p">)</span>
        <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">dstreams</span><span class="p">)</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
            <span class="k">return</span> <span class="n">dstreams</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
        <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="nb">set</span><span class="p">(</span><span class="n">s</span><span class="o">.</span><span class="n">_jrdd_deserializer</span> <span class="k">for</span> <span class="n">s</span> <span class="ow">in</span> <span class="n">dstreams</span><span class="p">))</span> <span class="o">&gt;</span> <span class="mi">1</span><span class="p">:</span>
            <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s">&quot;All DStreams should have same serializer&quot;</span><span class="p">)</span>
        <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="nb">set</span><span class="p">(</span><span class="n">s</span><span class="o">.</span><span class="n">_slideDuration</span> <span class="k">for</span> <span class="n">s</span> <span class="ow">in</span> <span class="n">dstreams</span><span class="p">))</span> <span class="o">&gt;</span> <span class="mi">1</span><span class="p">:</span>
            <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s">&quot;All DStreams should have same slide duration&quot;</span><span class="p">)</span>
        <span class="n">first</span> <span class="o">=</span> <span class="n">dstreams</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
        <span class="n">jrest</span> <span class="o">=</span> <span class="p">[</span><span class="n">d</span><span class="o">.</span><span class="n">_jdstream</span> <span class="k">for</span> <span class="n">d</span> <span class="ow">in</span> <span class="n">dstreams</span><span class="p">[</span><span class="mi">1</span><span class="p">:]]</span>
        <span class="k">return</span> <span class="n">DStream</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">_jssc</span><span class="o">.</span><span class="n">union</span><span class="p">(</span><span class="n">first</span><span class="o">.</span><span class="n">_jdstream</span><span class="p">,</span> <span class="n">jrest</span><span class="p">),</span> <span class="bp">self</span><span class="p">,</span> <span class="n">first</span><span class="o">.</span><span class="n">_jrdd_deserializer</span><span class="p">)</span></div></div>
</pre></div>

          </div>
        </div>
      </div>
      <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
        <div class="sphinxsidebarwrapper">
            <p class="logo"><a href="../../../index.html">
              <img class="logo" src="../../../_static/spark-logo-hd.png" alt="Logo"/>
            </a></p>
<div id="searchbox" style="display: none" role="search">
  <h3>Quick search</h3>
    <form class="search" action="../../../search.html" method="get">
      <input type="text" name="q" />
      <input type="submit" value="Go" />
      <input type="hidden" name="check_keywords" value="yes" />
      <input type="hidden" name="area" value="default" />
    </form>
    <p class="searchtip" style="font-size: 90%">
    Enter search terms or a module, class or function name.
    </p>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
        </div>
      </div>
      <div class="clearer"></div>
    </div>
    <div class="related" role="navigation" aria-label="related navigation">
      <h3>Navigation</h3>
      <ul>
        <li class="nav-item nav-item-0"><a href="../../../index.html">PySpark 1.5.0 documentation</a> &raquo;</li>
          <li class="nav-item nav-item-1"><a href="../../index.html" >Module code</a> &raquo;</li> 
      </ul>
    </div>
    <div class="footer" role="contentinfo">
        &copy; Copyright .
      Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.3.1.
    </div>
  </body>
</html>