From 217257879fe7c98673caf14b980790498887581e Mon Sep 17 00:00:00 2001
From: Dale
Date: Fri, 26 Dec 2014 20:33:05 +1100
Subject: [PATCH] [SPARK-4787] Stop context properly if an exception occurs
 during DAGScheduler initialization.

---
 core/src/main/scala/org/apache/spark/SparkContext.scala | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 57bc3d4e4ae36..2778975976b26 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -329,8 +329,11 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
   try {
     dagScheduler = new DAGScheduler(this)
   } catch {
-    case e: Exception => throw
-      new SparkException("DAGScheduler cannot be initialized due to %s".format(e.getMessage))
+    case e: Exception => {
+      stop()
+      throw
+        new SparkException("DAGScheduler cannot be initialized due to %s".format(e.getMessage))
+    }
   }
 
   // start TaskScheduler after taskScheduler sets DAGScheduler reference in DAGScheduler's
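
For context, below is a minimal, self-contained Scala sketch of the pattern this patch applies: when a later step of object construction fails, release the resources acquired so far (here, by calling stop()) before rethrowing a wrapped exception. All names in the sketch (FakeContext, initScheduler, SchedulerInitException) are illustrative stand-ins, not Spark's actual API.

// Minimal sketch (not Spark code): stop a partially constructed object before
// rethrowing when a later initialization step fails. All names are hypothetical.
class SchedulerInitException(msg: String, cause: Throwable) extends Exception(msg, cause)

class FakeContext {
  // Resources acquired by earlier initialization steps.
  private var started = true

  def stop(): Unit = {
    if (started) {
      started = false
      println("released resources")
    }
  }

  // Later step that can fail; simulated here with an unconditional throw.
  private def initScheduler(): String =
    throw new RuntimeException("simulated failure")

  // On failure, clean up before propagating the error so callers are not left
  // holding a half-initialized, resource-leaking context.
  private val scheduler: String =
    try {
      initScheduler()
    } catch {
      case e: Exception =>
        stop()
        throw new SchedulerInitException(
          "Scheduler cannot be initialized due to %s".format(e.getMessage), e)
    }
}

object Demo extends App {
  try new FakeContext()
  catch { case e: SchedulerInitException => println("caught: " + e.getMessage) }
}

One design consideration with this pattern: if the cleanup call itself throws, it can mask the original exception, so wrapping the cleanup in try/finally (or catching and logging cleanup failures separately) keeps the root cause visible.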