1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pyspark import since
from pyspark.rdd import ignore_unicode_prefix
class RuntimeConfig(object):
"""User-facing configuration API, accessible through `SparkSession.conf`.
Options set here are automatically propagated to the Hadoop configuration during I/O.
This a thin wrapper around its Scala implementation org.apache.spark.sql.RuntimeConfig.
"""
def __init__(self, jconf):
"""Create a new RuntimeConfig that wraps the underlying JVM object."""
self._jconf = jconf
@ignore_unicode_prefix
@since(2.0)
def set(self, key, value):
"""Sets the given Spark runtime configuration property."""
self._jconf.set(key, value)
@ignore_unicode_prefix
@since(2.0)
def get(self, key, default=None):
"""Returns the value of Spark runtime configuration property for the given key,
assuming it is set.
"""
self._checkType(key, "key")
if default is None:
return self._jconf.get(key)
else:
self._checkType(default, "default")
return self._jconf.get(key, default)
@ignore_unicode_prefix
@since(2.0)
def unset(self, key):
"""Resets the configuration property for the given key."""
self._jconf.unset(key)
def _checkType(self, obj, identifier):
"""Assert that an object is of type str."""
if not isinstance(obj, str) and not isinstance(obj, unicode):
raise TypeError("expected %s '%s' to be a string (was '%s')" %
(identifier, obj, type(obj).__name__))
def _test():
import os
import doctest
from pyspark.context import SparkContext
from pyspark.sql.session import SparkSession
import pyspark.sql.conf
os.chdir(os.environ["SPARK_HOME"])
globs = pyspark.sql.conf.__dict__.copy()
sc = SparkContext('local[4]', 'PythonTest')
globs['sc'] = sc
globs['spark'] = SparkSession(sc)
(failure_count, test_count) = doctest.testmod(pyspark.sql.conf, globs=globs)
globs['sc'].stop()
if failure_count:
exit(-1)
if __name__ == "__main__":
_test()
|