[SPARK-22660][BUILD] Use position() and limit() to fix ambiguity issue in scala-2.12

## What changes were proposed in this pull request?
The original SPARK-22660 change missed a few `limit` call sites in TaskSetManager.scala; this follow-up switches them to explicit `limit()` calls. A minimal sketch of the call pattern is shown below.
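
The change itself is mechanical, so here is a small, self-contained sketch of the pattern being applied. The object name `LimitCallSketch` and the wrapped string are illustrative only, and the explanation of the overload ambiguity is inferred from the commit title rather than stated in this commit:

```scala
import java.nio.ByteBuffer

object LimitCallSketch {
  def main(args: Array[String]): Unit = {
    // Stand-in for a serialized task payload (illustrative data only).
    val serializedTask: ByteBuffer =
      ByteBuffer.wrap("some serialized task bytes".getBytes("UTF-8"))

    // ByteBuffer exposes both a no-arg getter limit() and a one-arg setter limit(int).
    // Writing `serializedTask.limit` without parentheses is what the Scala 2.12 build
    // reportedly treats as ambiguous; calling the getter with explicit empty parens,
    // as this commit does, sidesteps that.
    val sizeInBytes: Int = serializedTask.limit()

    println(s"serialized size: $sizeInBytes bytes")
  }
}
```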

## How was this patch tested?
Running the existing tests.

Please review http://spark.apache.org/contributing.html before opening a pull request.

Author: kellyzly <kellyzly@126.com>

Closes #19976 from kellyzly/SPARK-22660.2.
kellyzly authored and srowen committed Dec 14, 2017
1 parent 40de176 commit 6d99940
Showing 1 changed file with 3 additions and 3 deletions.
TaskSetManager.scala
@@ -488,11 +488,11 @@ private[spark] class TaskSetManager(
         abort(s"$msg Exception during serialization: $e")
         throw new TaskNotSerializableException(e)
       }
-      if (serializedTask.limit > TaskSetManager.TASK_SIZE_TO_WARN_KB * 1024 &&
+      if (serializedTask.limit() > TaskSetManager.TASK_SIZE_TO_WARN_KB * 1024 &&
         !emittedTaskSizeWarning) {
         emittedTaskSizeWarning = true
         logWarning(s"Stage ${task.stageId} contains a task of very large size " +
-          s"(${serializedTask.limit / 1024} KB). The maximum recommended task size is " +
+          s"(${serializedTask.limit() / 1024} KB). The maximum recommended task size is " +
           s"${TaskSetManager.TASK_SIZE_TO_WARN_KB} KB.")
       }
       addRunningTask(taskId)
@@ -502,7 +502,7 @@ private[spark] class TaskSetManager(
       // val timeTaken = clock.getTime() - startTime
       val taskName = s"task ${info.id} in stage ${taskSet.id}"
       logInfo(s"Starting $taskName (TID $taskId, $host, executor ${info.executorId}, " +
-        s"partition ${task.partitionId}, $taskLocality, ${serializedTask.limit} bytes)")
+        s"partition ${task.partitionId}, $taskLocality, ${serializedTask.limit()} bytes)")

       sched.dagScheduler.taskStarted(task, info)
       new TaskDescription(
