core/src/main/scala/org/apache/spark/rdd (1 file changed: +9, -1 lines)

@@ -112,7 +112,7 @@ abstract class RDD[T: ClassTag](
    * because DAGs are acyclic, and we only ever hold locks for one path in that DAG, there is no
    * chance of deadlock.
    */
-  private val stateLock = new Object()
+  private val stateLock = new SerializableObject()

   /** Construct an RDD with just a one-to-one dependency on one parent */
   def this(@transient oneParent: RDD[_]) =
@@ -1978,6 +1978,14 @@ abstract class RDD[T: ClassTag](
       deterministicLevelCandidates.maxBy(_.id)
     }
   }
+
+  /**
+   * The only purpose of this class is to have something to lock (like a generic Object) but that
+   * will also survive serialization. Obviously, the locks themselves don't survive serialization,
+   * but we really only need the locks in the driver, and just don't want things to break with NPEs
+   * on the executors.
+   */
+  private class SerializableObject extends Serializable
 }
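For context, the pattern the added SerializableObject class enables can be sketched outside Spark. The snippet below is a minimal, hypothetical example (SerializableLockDemo, Node, and roundTrip are illustrative names, not Spark code): a serializable class holds an empty serializable marker object as its lock, so Java serialization neither fails on a non-serializable Object field nor leaves a null @transient field that would trigger NPEs after deserialization.

```scala
import java.io._

// Same idea as the diff's SerializableObject: an empty class whose only job is
// to be lockable and to survive serialization. (In the diff it is private to RDD.)
class SerializableObject extends Serializable

// Hypothetical stand-in for RDD: a serializable class guarding mutable state with the lock.
class Node(val name: String) extends Serializable {
  // With `new Object()` here, serializing a Node would throw NotSerializableException;
  // with `@transient new Object()`, the field would be null after deserialization and
  // synchronizing on it would throw a NullPointerException. This variant avoids both.
  private val stateLock = new SerializableObject

  private var state: String = "init"

  def update(s: String): Unit = stateLock.synchronized { state = s }
  def current: String = stateLock.synchronized { state }
}

object SerializableLockDemo {
  // Serialize and deserialize, simulating shipping the object to an executor.
  private def roundTrip[T <: AnyRef](value: T): T = {
    val buf = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(buf)
    out.writeObject(value)
    out.close()
    new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray)).readObject().asInstanceOf[T]
  }

  def main(args: Array[String]): Unit = {
    val copy = roundTrip(new Node("driver-side"))
    copy.update("on-executor") // the fresh, non-null lock still works after the round trip
    println(copy.current)      // prints "on-executor"
  }
}
```

The lock in the deserialized copy is a fresh object rather than the driver's original monitor, which matches the diff's comment: the lock only matters on the driver, and the executors just need a non-null field to synchronize on.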