Skip to content

Commit

Permalink
Remove setProperty calls in SparkContextSchedulerCreationSuite
Browse files — Browse the repository at this point in the history
  • Loading branch information
JoshRosen committed Dec 24, 2014
1 parent 655587c commit bee20df
Showing 1 changed file with 14 additions and 15 deletions.
Original file line number | Diff line number | Diff line change
Expand Up @@ -23,16 +23,17 @@ import org.apache.spark.scheduler.{SchedulerBackend, TaskScheduler, TaskSchedule
import org.apache.spark.scheduler.cluster.{SimrSchedulerBackend, SparkDeploySchedulerBackend}
import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
import org.apache.spark.scheduler.local.LocalBackend
import org.apache.spark.util.ResetSystemProperties

class SparkContextSchedulerCreationSuite
extends FunSuite with LocalSparkContext with PrivateMethodTester with Logging
with ResetSystemProperties {
extends FunSuite with LocalSparkContext with PrivateMethodTester with Logging {

def createTaskScheduler(master: String): TaskSchedulerImpl = {
def createTaskScheduler(master: String): TaskSchedulerImpl =
createTaskScheduler(master, new SparkConf())

def createTaskScheduler(master: String, conf: SparkConf): TaskSchedulerImpl = {
// Create local SparkContext to setup a SparkEnv. We don't actually want to start() the
// real schedulers, so we don't want to create a full SparkContext with the desired scheduler.
sc = new SparkContext("local", "test")
sc = new SparkContext("local", "test", conf)
val createTaskSchedulerMethod =
PrivateMethod[Tuple2[SchedulerBackend, TaskScheduler]]('createTaskScheduler)
val (_, sched) = SparkContext invokePrivate createTaskSchedulerMethod(sc, master)
Expand Down Expand Up @@ -104,8 +105,8 @@ class SparkContextSchedulerCreationSuite
}

test("local-default-parallelism") {
System.setProperty("spark.default.parallelism", "16")
val sched = createTaskScheduler("local")
val conf = new SparkConf().set("spark.default.parallelism", "16")
val sched = createTaskScheduler("local", conf)

sched.backend match {
case s: LocalBackend => assert(s.defaultParallelism() === 16)
Expand Down Expand Up @@ -151,9 +152,10 @@ class SparkContextSchedulerCreationSuite
testYarn("yarn-client", "org.apache.spark.scheduler.cluster.YarnClientClusterScheduler")
}

def testMesos(master: String, expectedClass: Class[_]) {
def testMesos(master: String, expectedClass: Class[_], coarse: Boolean) {
val conf = new SparkConf().set("spark.mesos.coarse", coarse.toString)
try {
val sched = createTaskScheduler(master)
val sched = createTaskScheduler(master, conf)
assert(sched.backend.getClass === expectedClass)
} catch {
case e: UnsatisfiedLinkError =>
Expand All @@ -164,17 +166,14 @@ class SparkContextSchedulerCreationSuite
}

test("mesos fine-grained") {
System.setProperty("spark.mesos.coarse", "false")
testMesos("mesos://localhost:1234", classOf[MesosSchedulerBackend])
testMesos("mesos://localhost:1234", classOf[MesosSchedulerBackend], coarse = false)
}

test("mesos coarse-grained") {
System.setProperty("spark.mesos.coarse", "true")
testMesos("mesos://localhost:1234", classOf[CoarseMesosSchedulerBackend])
testMesos("mesos://localhost:1234", classOf[CoarseMesosSchedulerBackend], coarse = true)
}

test("mesos with zookeeper") {
System.setProperty("spark.mesos.coarse", "false")
testMesos("zk://localhost:1234,localhost:2345", classOf[MesosSchedulerBackend])
testMesos("zk://localhost:1234,localhost:2345", classOf[MesosSchedulerBackend], coarse = false)
}
}

0 comments on commit bee20df

Please sign in to comment.