[SPARK-7504] [YARN] NullPointerException when initializing SparkContext in YARN-cluster mode

Added a simple check to SparkContext.
Also added two null checks to the ApplicationMaster object.
zzvara committed May 12, 2015
1 parent f3e8e60 commit 9f287c5
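
For context, the failure this change guards against can be reproduced with a small program like the following sketch (hypothetical user code, not part of the commit; the object and app names are made up):

import org.apache.spark.{SparkConf, SparkContext}

// Hypothetical reproduction: constructing a SparkContext with a yarn-cluster
// master directly, instead of going through spark-submit and the YARN AM.
object YarnClusterMisuse {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("yarn-cluster-misuse")
      .setMaster("yarn-cluster")
    // Before this commit this path could end in a NullPointerException inside
    // ApplicationMaster.sparkContextInitialized (the AM singleton was never set).
    // After this commit SparkContext fails fast with a SparkException telling
    // the user to deploy through spark-submit.
    val sc = new SparkContext(conf)
    sc.stop()
  }
}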
Showing 2 changed files with 14 additions and 1 deletion.
7 changes: 7 additions & 0 deletions core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -371,6 +371,13 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationClient
throw new SparkException("An application name must be set in your configuration")
}

// Thread name has been set to "Driver" if user code ran by AM on a YARN cluster
if (master == "yarn-cluster" &&
Thread.currentThread().getName != "Driver") {
throw new SparkException("Detected yarn-cluster mode, but isn't running on a cluster. " +
"Deployment to YARN is not supported directly by SparkContext. Please use spark-submit.")
}

if (_conf.getBoolean("spark.logConf", false)) {
logInfo("Spark configuration:\n" + _conf.toDebugString)
}
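
The new check keys off the thread name: when the YARN ApplicationMaster runs the user class (startUserApplication in the file below), it does so on a thread named "Driver". A minimal, self-contained sketch of that convention, as an illustration only (not the actual AM code):

// Illustration of the "Driver" thread-name convention the check relies on:
// user code launched by the AM runs on a thread named "Driver", so a
// SparkContext created anywhere else can detect the mismatch.
object DriverThreadSketch {
  def main(args: Array[String]): Unit = {
    val userThread = new Thread {
      override def run(): Unit = {
        val onDriver = Thread.currentThread().getName == "Driver"
        println(s"user code running, thread is named 'Driver': $onDriver")
      }
    }
    userThread.setName("Driver") // mirrors what the AM does before starting user code
    userThread.start()
    userThread.join()
  }
}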
8 changes: 7 additions & 1 deletion yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -453,6 +453,7 @@ private[spark] class ApplicationMaster(
  private def startUserApplication(): Thread = {
    logInfo("Starting the user application in a separate Thread")
    System.setProperty("spark.executor.instances", args.numExecutors.toString)
    // System.setProperty("spark.yarn.am.thread", "yarn-cluster")

    val classpath = Client.getUserClasspath(sparkConf)
    val urls = classpath.map { entry =>
@@ -573,13 +574,18 @@ object ApplicationMaster extends Logging {
  }

  private[spark] def sparkContextInitialized(sc: SparkContext): Unit = {
    if (master == null) {
      throw new SparkException("ApplicationMaster is not initialized!")
    }
    master.sparkContextInitialized(sc)
  }

  private[spark] def sparkContextStopped(sc: SparkContext): Boolean = {
    if (master == null) {
      throw new SparkException("ApplicationMaster is not initialized!")
    }
    master.sparkContextStopped(sc)
  }

}

/**
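
As a side note, the same guard could also be written around an Option instead of an explicit null check; a rough sketch of that alternative (hypothetical, not what this commit does):

// Hypothetical Option-based variant of the guard above: fail with a clear
// message instead of dereferencing a possibly-null reference.
object OptionGuardSketch {
  final class NotInitializedException(msg: String) extends RuntimeException(msg)

  // Stand-in for the ApplicationMaster reference, absent until initialization.
  private var master: Option[String] = None

  def sparkContextInitialized(): Unit = {
    val am = master.getOrElse(
      throw new NotInitializedException("ApplicationMaster is not initialized!"))
    println(s"notifying $am")
  }

  def main(args: Array[String]): Unit = {
    try sparkContextInitialized()
    catch { case e: NotInitializedException => println(e.getMessage) }
  }
}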
