aboutsummaryrefslogtreecommitdiff
path: root/python/pyspark/context.py
diff options
context:
space:
mode:
authorAaron Staple <aaron.staple@gmail.com>2014-09-15 19:28:17 -0700
committerJosh Rosen <joshrosen@apache.org>2014-09-15 19:28:17 -0700
commit60050f42885582a699fc7a6fa0529964162bb8a3 (patch)
tree4014b35f39ee99dcd3d26b288dba0e7555a97ad6 /python/pyspark/context.py
parentda33acb8b681eca5e787d546fe922af76a151398 (diff)
downloadspark-60050f42885582a699fc7a6fa0529964162bb8a3.tar.gz
spark-60050f42885582a699fc7a6fa0529964162bb8a3.tar.bz2
spark-60050f42885582a699fc7a6fa0529964162bb8a3.zip
[SPARK-1087] Move python traceback utilities into new traceback_utils.py file.
Also made some cosmetic cleanups. Author: Aaron Staple <aaron.staple@gmail.com> Closes #2385 from staple/SPARK-1087 and squashes the following commits: 7b3bb13 [Aaron Staple] Address review comments, cosmetic cleanups. 10ba6e1 [Aaron Staple] [SPARK-1087] Move python traceback utilities into new traceback_utils.py file.
Diffstat (limited to 'python/pyspark/context.py')
-rw-r--r--python/pyspark/context.py8
1 file changed, 2 insertions, 6 deletions
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index ea28e8cd8c..a33aae87f6 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -20,7 +20,6 @@ import shutil
import sys
from threading import Lock
from tempfile import NamedTemporaryFile
-from collections import namedtuple
from pyspark import accumulators
from pyspark.accumulators import Accumulator
@@ -33,6 +32,7 @@ from pyspark.serializers import PickleSerializer, BatchedSerializer, UTF8Deseria
from pyspark.storagelevel import StorageLevel
from pyspark import rdd
from pyspark.rdd import RDD
+from pyspark.traceback_utils import CallSite, first_spark_call
from py4j.java_collections import ListConverter
@@ -99,11 +99,7 @@ class SparkContext(object):
...
ValueError:...
"""
- if rdd._extract_concise_traceback() is not None:
- self._callsite = rdd._extract_concise_traceback()
- else:
- tempNamedTuple = namedtuple("Callsite", "function file linenum")
- self._callsite = tempNamedTuple(function=None, file=None, linenum=None)
+ self._callsite = first_spark_call() or CallSite(None, None, None)
SparkContext._ensure_initialized(self, gateway=gateway)
try:
self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,