diff --git a/R/pkg/R/context.R b/R/pkg/R/context.R index 7a8bffa1ccd43..438d77a388f0e 100644 --- a/R/pkg/R/context.R +++ b/R/pkg/R/context.R @@ -87,8 +87,9 @@ objectFile <- function(sc, path, minPartitions = NULL) { #' in the list are split into \code{numSlices} slices and distributed to nodes #' in the cluster. #' -#' If size of serialized slices is larger than 2GB (or INT_MAX bytes), the function -#' will write it to disk and send the file name to JVM. +#' If the size of serialized slices is larger than \code{spark.r.maxAllocationLimit} (200MB by default), +#' the function will write it to disk and send the file name to the JVM. Also, to make sure each +#' slice is not larger than that limit, the number of slices may be increased. #' #' @param sc SparkContext to use #' @param coll collection to parallelize