about summary refs log tree commit diff
path: root/core
diff options
context:
space:
mode:
authorEric Liang <ekl@databricks.com>2016-06-22 12:12:34 -0700
committerDavies Liu <davies.liu@gmail.com>2016-06-22 12:12:34 -0700
commit6f915c9ec24003877d1ef675a59145699780a2ff (patch)
tree07fd03c29e7045b60c535baa6e2fae2cfe578330 /core
parent472d611a70da02d95e36da754435a3ac562f8b24 (diff)
downloadspark-6f915c9ec24003877d1ef675a59145699780a2ff.tar.gz
spark-6f915c9ec24003877d1ef675a59145699780a2ff.tar.bz2
spark-6f915c9ec24003877d1ef675a59145699780a2ff.zip
[SPARK-16003] SerializationDebugger runs into infinite loop
## What changes were proposed in this pull request? This fixes SerializationDebugger to not recurse forever when `writeReplace` returns an object of the same class, which is the case for at least the `SQLMetrics` class. See also the OpenJDK unit tests on the behavior of recursive `writeReplace()`: https://github.com/openjdk-mirror/jdk7u-jdk/blob/f4d80957e89a19a29bb9f9807d2a28351ed7f7df/test/java/io/Serializable/nestedReplace/NestedReplace.java cc davies cloud-fan ## How was this patch tested? Unit tests for SerializationDebugger. Author: Eric Liang <ekl@databricks.com> Closes #13814 from ericl/spark-16003.
Diffstat (limited to 'core')
-rw-r--r--core/src/main/scala/org/apache/spark/serializer/SerializationDebugger.scala9
-rw-r--r--core/src/test/scala/org/apache/spark/serializer/SerializationDebuggerSuite.scala13
2 files changed, 16 insertions(+), 6 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/serializer/SerializationDebugger.scala b/core/src/main/scala/org/apache/spark/serializer/SerializationDebugger.scala
index c04b483831..5e7a98c8aa 100644
--- a/core/src/main/scala/org/apache/spark/serializer/SerializationDebugger.scala
+++ b/core/src/main/scala/org/apache/spark/serializer/SerializationDebugger.scala
@@ -155,7 +155,7 @@ private[spark] object SerializationDebugger extends Logging {
// If the object has been replaced using writeReplace(),
// then call visit() on it again to test its type again.
- if (!finalObj.eq(o)) {
+ if (finalObj.getClass != o.getClass) {
return visit(finalObj, s"writeReplace data (class: ${finalObj.getClass.getName})" :: stack)
}
@@ -265,11 +265,10 @@ private[spark] object SerializationDebugger extends Logging {
if (!desc.hasWriteReplaceMethod) {
(o, desc)
} else {
- // write place
val replaced = desc.invokeWriteReplace(o)
- // `writeReplace` may return the same object.
- if (replaced eq o) {
- (o, desc)
+ // `writeReplace` recursion stops when the returned object has the same class.
+ if (replaced.getClass == o.getClass) {
+ (replaced, desc)
} else {
findObjectAndDescriptor(replaced)
}
diff --git a/core/src/test/scala/org/apache/spark/serializer/SerializationDebuggerSuite.scala b/core/src/test/scala/org/apache/spark/serializer/SerializationDebuggerSuite.scala
index f019b1e259..912a516dff 100644
--- a/core/src/test/scala/org/apache/spark/serializer/SerializationDebuggerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/SerializationDebuggerSuite.scala
@@ -126,7 +126,11 @@ class SerializationDebuggerSuite extends SparkFunSuite with BeforeAndAfterEach {
assert(find(new SerializableClassWithWriteReplace(new SerializableClass1)).isEmpty)
}
- test("object containing writeObject() and not serializable field") {
+ test("no infinite loop with writeReplace() which returns class of its own type") {
+ assert(find(new SerializableClassWithRecursiveWriteReplace).isEmpty)
+ }
+
+ test("object containing writeObject() and not serializable field") {
val s = find(new SerializableClassWithWriteObject(new NotSerializable))
assert(s.size === 3)
assert(s(0).contains("NotSerializable"))
@@ -229,6 +233,13 @@ class SerializableClassWithWriteReplace(@(transient @param) replacementFieldObje
}
+class SerializableClassWithRecursiveWriteReplace extends Serializable {
+ private def writeReplace(): Object = {
+ new SerializableClassWithRecursiveWriteReplace
+ }
+}
+
+
class ExternalizableClass(objectField: Object) extends java.io.Externalizable {
val serializableObjectField = new SerializableClass1