diff --git a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
index 3e09c2599085c..96697d2e603a2 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala
@@ -174,8 +174,9 @@ class ExternalAppendOnlyMap[K, V, C](
   private def spill(mapSize: Long): Unit = {
     spillCount += 1
     val threadId = Thread.currentThread().getId
-    logInfo("Thread %d spilling in-memory map of %d MB to disk (%d time%s so far)"
-      .format(threadId, mapSize / (1024 * 1024), spillCount, if (spillCount > 1) "s" else ""))
+    logInfo("Thread %d spilling in-memory map of %s to disk (%d time%s so far)"
+      .format(threadId, org.apache.spark.util.Utils.bytesToString(mapSize),
+        spillCount, if (spillCount > 1) "s" else ""))
     val (blockId, file) = diskBlockManager.createTempLocalBlock()
     curWriteMetrics = new ShuffleWriteMetrics()
     var writer = blockManager.getDiskWriter(blockId, file, serializer, fileBufferSize,
diff --git a/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala b/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
index 97ddd96c98268..d414ce39e9ae0 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
@@ -268,8 +268,9 @@ private[spark] class ExternalSorter[K, V, C](
     spillCount += 1
     val threadId = Thread.currentThread().getId
-    logInfo("Thread %d spilling in-memory batch of %d MB to disk (%d spill%s so far)"
-      .format(threadId, memorySize / (1024 * 1024), spillCount, if (spillCount > 1) "s" else ""))
+    logInfo("Thread %d spilling in-memory batch of %s to disk (%d spill%s so far)"
+      .format(threadId, org.apache.spark.util.Utils.bytesToString(memorySize),
+        spillCount, if (spillCount > 1) "s" else ""))
     if (bypassMergeSort) {
       spillToPartitionFiles(collection)
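
The motivation for both hunks is the same: the old messages computed `mapSize / (1024 * 1024)` (or `memorySize / (1024 * 1024)`) with integer division, so any spill smaller than 1 MB was logged as "0 MB". `Utils.bytesToString` formats the raw byte count with an appropriate unit instead. Below is a minimal, self-contained sketch of the before/after behavior; the `bytesToStr` helper is a hypothetical stand-in for Spark's `org.apache.spark.util.Utils.bytesToString`, not a copy of its implementation.

```scala
object SpillLogDemo {
  // Stand-in for Utils.bytesToString: pick the largest binary unit that fits.
  def bytesToStr(size: Long): String = {
    val (value, unit) =
      if (size >= (1L << 30)) (size.toDouble / (1L << 30), "GB")
      else if (size >= (1L << 20)) (size.toDouble / (1L << 20), "MB")
      else if (size >= (1L << 10)) (size.toDouble / (1L << 10), "KB")
      else (size.toDouble, "B")
    "%.1f %s".format(value, unit)
  }

  def main(args: Array[String]): Unit = {
    val mapSize = 700L * 1024 // a 700 KB in-memory map about to spill

    // Old message: integer division truncates anything under 1 MB to 0.
    println("old: spilling in-memory map of %d MB to disk"
      .format(mapSize / (1024 * 1024)))   // prints "... of 0 MB ..."

    // New message: human-readable size with a suitable unit.
    println("new: spilling in-memory map of %s to disk"
      .format(bytesToStr(mapSize)))       // prints "... of 700.0 KB ..."
  }
}
```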