Skip to content

Commit

Permalink
Optimize performance for partitions check
Browse files — browse the repository at this point in the history
  • Loading branch information
liguoqiang committed Mar 3, 2014
1 parent 61e5a87 commit 3348619
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions core/src/main/scala/org/apache/spark/SparkContext.scala
Original file line number Diff line number Diff line change
Expand Up @@ -847,8 +847,8 @@ class SparkContext(
partitions: Seq[Int],
allowLocal: Boolean,
resultHandler: (Int, U) => Unit) {
val rddPartitions = rdd.partitions.map(_.index)
require(partitions.forall(rddPartitions.contains(_)), "partition index out of range")
val partitionRange = (0 until rdd.partitions.size)
require(partitions.forall(partitionRange.contains(_)), "partition index out of range")
val callSite = getCallSite
val cleanedFunc = clean(func)
logInfo("Starting job: " + callSite)
Expand Down Expand Up @@ -952,8 +952,8 @@ class SparkContext(
resultHandler: (Int, U) => Unit,
resultFunc: => R): SimpleFutureAction[R] =
{
val rddPartitions = rdd.partitions.map(_.index)
require(partitions.forall(rddPartitions.contains(_)), "partition index out of range")
val partitionRange = (0 until rdd.partitions.size)
require(partitions.forall(partitionRange.contains(_)), "partition index out of range")
val cleanF = clean(processPartition)
val callSite = getCallSite
val waiter = dagScheduler.submitJob(
Expand Down

0 comments on commit 3348619

Please sign in to comment.