[SPARK-2778] [yarn] Add yarn integration tests.
This patch adds a couple of currently very simple integration tests to make sure both client and cluster modes are working. The tests don't do much yet other than run a simple job, but the plan is to enhance them after we get the framework in.

The cluster tests are noisy, so redirect all log output to a file like other tests do. Copying the conf around sucks, but it's less work than messing with maven/sbt and having to clean up other projects.

Note the test is only added for yarn-stable. The code compiles against yarn-alpha, but there are two issues I ran into that I could not overcome:

- an old netty dependency kept creeping into the classpath and causing akka to not work when using sbt; the old netty was correctly suppressed under maven.
- MiniYARNCluster kept failing to execute containers because it did not create the NM's local dir itself; this is apparently a known behavior, but I'm not sure how to work around it.

Neither issue is present with stable Yarn.

Also, these tests are a little slow to run. Spark doesn't yet tag tests (so that these could be isolated in a "slow" batch), so this is something to keep in mind.
Marcelo Vanzin committed Sep 3, 2014 (1 parent: f2b5b61, commit: add8416)
Showing 5 changed files with 209 additions and 3 deletions.
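The commit message notes that Spark doesn't yet tag its tests, so slow integration tests like these can't be isolated into their own batch. As a rough sketch of what such tagging could look like with ScalaTest (the SlowTest tag object and the example suite below are hypothetical, not part of this commit):

import org.scalatest.{FunSuite, Tag}

// Hypothetical marker tag; a real one would live somewhere shared across modules.
object SlowTest extends Tag("org.apache.spark.SlowTest")

class ExampleSlowSuite extends FunSuite {
  // Tagging a test lets the runner include or exclude it by tag name.
  test("runs a job against a mini YARN cluster", SlowTest) {
    assert(1 + 1 === 2)
  }
}

With tags in place, the default build could exclude the slow batch (ScalaTest runner argument -l org.apache.spark.SlowTest) and a separate job could run only it (-n org.apache.spark.SlowTest).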
@@ -0,0 +1,28 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Set everything to be logged to the file core/target/unit-tests.log
log4j.rootCategory=INFO, file
log4j.appender.file=org.apache.log4j.FileAppender
log4j.appender.file.append=false
log4j.appender.file.file=target/unit-tests.log
log4j.appender.file.layout=org.apache.log4j.PatternLayout
log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %p %c{1}: %m%n

# Ignore messages below warning level from Jetty, because it's a bit verbose
log4j.logger.org.eclipse.jetty=WARN
org.eclipse.jetty.LEVEL=WARN
yarn/stable/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala (141 additions, 0 deletions)
@@ -0,0 +1,141 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.deploy.yarn

import java.io.File

import scala.collection.JavaConversions._
import scala.collection.mutable.HashMap

import com.google.common.base.Charsets
import com.google.common.io.Files
import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}

import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.apache.hadoop.yarn.server.MiniYARNCluster

import org.apache.spark.{Logging, SparkConf, SparkContext}
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.util.Utils

class YarnClusterSuite extends FunSuite with BeforeAndAfterAll with Matchers {

  private val oldConf = new HashMap[String, String]()
  private var yarnCluster: MiniYARNCluster = _
  private var tempDir: File = _
  private var fakeSparkJar: File = _

  override def beforeAll() {
    tempDir = Utils.createTempDir()

    yarnCluster = new MiniYARNCluster(getClass().getName(), 1, 1, 1, 1, false)
    yarnCluster.init(new YarnConfiguration())
    yarnCluster.start()

    val sysProps = sys.props.map { case (k, v) => (k, v) }
    sysProps.foreach { case (k, v) =>
      if (k.startsWith("spark.")) {
        oldConf += (k -> v)
        sys.props -= k
      }
    }

    yarnCluster.getConfig().foreach { e =>
      sys.props += ("spark.hadoop." + e.getKey() -> e.getValue())
    }

    fakeSparkJar = File.createTempFile("sparkJar", null, tempDir)
    sys.props += ("spark.yarn.jar" -> ("local:" + fakeSparkJar.getAbsolutePath()))
    sys.props += ("spark.executor.instances" -> "1")
    sys.props += ("spark.driver.extraClassPath" -> sys.props("java.class.path"))
    sys.props += ("spark.executor.extraClassPath" -> sys.props("java.class.path"))

    super.beforeAll()
  }

  override def afterAll() {
    yarnCluster.stop()

    val sysProps = sys.props.map { case (k, v) => (k, v) }
    sysProps.foreach { case (k, v) =>
      if (k.startsWith("spark.")) {
        sys.props -= k
      }
    }

    oldConf.foreach { case (k, v) => sys.props += (k -> v) }

    super.afterAll()
  }

  test("run Spark in yarn-client mode") {
    var result = File.createTempFile("result", null, tempDir)
    YarnClusterDriver.main(Array("yarn-client", result.getAbsolutePath()))
    checkResult(result)
  }

  test("run Spark in yarn-cluster mode") {
    val main = YarnClusterDriver.getClass.getName().stripSuffix("$")
    var result = File.createTempFile("result", null, tempDir)

    // The Client object will call System.exit() after the job is done, and we don't want
    // that because it messes up the scalatest monitoring. So replicate some of what main()
    // does here.
    val args = Array("--class", main,
      "--jar", "file:" + fakeSparkJar.getAbsolutePath(),
      "--arg", "yarn-cluster",
      "--arg", result.getAbsolutePath(),
      "--num-executors", "4")
    val sparkConf = new SparkConf()
    val yarnConf = SparkHadoopUtil.get.newConfiguration(sparkConf)
    val clientArgs = new ClientArguments(args, sparkConf)
    new Client(clientArgs, yarnConf, sparkConf).run()
    checkResult(result)
  }

  /**
   * This is a workaround for an issue with yarn-cluster mode: the Client class will not provide
   * any sort of error when the job process finishes successfully, but the job itself fails. So
   * the tests enforce that something is written to a file after everything is ok to indicate
   * that the job succeeded.
   */
  private def checkResult(result: File) = {
    var resultString = Files.toString(result, Charsets.UTF_8)
    resultString should be ("success")
  }

}

private object YarnClusterDriver extends Logging with Matchers {

  def main(args: Array[String]) = {
    val sc = new SparkContext(new SparkConf().setMaster(args(0))
      .setAppName("yarn \"test app\" 'with quotes'"))
    val status = new File(args(1))
    var result = "failure"
    try {
      val data = sc.parallelize(1 to 4).map(i => i).collect().toSet
      data should be (Set(1, 2, 3, 4))
      result = "success"
    } finally {
      sc.stop()
      Files.write(result, status, Charsets.UTF_8)
    }
  }

}