This repository has been archived by the owner on Oct 12, 2023. It is now read-only.

Enable/disable merge task #39

Merged: 8 commits, Jul 13, 2017
Changes from all commits
92 changes: 56 additions & 36 deletions R/doAzureParallel.R
@@ -163,6 +163,23 @@ setVerbose <- function(value = FALSE){
.doAzureBatchGlobals)
}

cloudCombine <- list()
enableCloudCombine <- TRUE
if(!is.null(obj$options$azure$enableCloudCombine) && is.logical(obj$options$azure$enableCloudCombine)){
enableCloudCombine <- obj$options$azure$enableCloudCombine
}

if(!is.null(obj$options$azure$cloudCombine)){
# cloudCombine <- obj$options$azure$cloudCombine
}

if(!enableCloudCombine){
cloudCombine <- NULL
}

assign("enableCloudCombine", enableCloudCombine, envir=.doAzureBatchGlobals)
assign("cloudCombine", cloudCombine, envir=.doAzureBatchGlobals)

retryCounter <- 0
maxRetryCount <- 5
startupFolderName <- "startup"
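
For context, a minimal call-site sketch (not part of this diff) of how the new flag could be toggled, assuming the usual mapping of foreach's .options.azure list onto the obj$options$azure fields read above, and the standard cluster registration flow:

library(doAzureParallel)
# registerDoAzureParallel(cluster)  # assumes a cluster has already been created and registered

# Setting enableCloudCombine = FALSE disables the cloud-side combine of task
# results, so the guarded merge-result download further below is skipped.
# Leaving the option unset keeps the default (TRUE), i.e. current behaviour.
foreach(i = 1:4, .options.azure = list(enableCloudCombine = FALSE)) %dopar% {
  sqrt(i)
}
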
@@ -327,47 +344,50 @@ setVerbose <- function(value = FALSE){
envir = .doAzureBatchGlobals,
packages = obj$packages,
dependsOn = tasks,
cloudCombine = cloudCombine,
outputFiles = obj$options$azure$outputFiles)

if(wait){
waitForTasksToComplete(id, jobTimeout, progress = !is.null(obj$progress), tasks = nout + 1)

response <- downloadBlob(id, paste0("result/", id, "-merge-result.rds"), sasToken = sasToken, accountName = storageCredentials$name)
tempFile <- tempfile("doAzureParallel", fileext = ".rds")
bin <- content(response, "raw")
writeBin(bin, tempFile)
results <- readRDS(tempFile)

failTasks <- sapply(results, .isError)

numberOfFailedTasks <- sum(unlist(failTasks))

if(numberOfFailedTasks > 0){
.createErrorViewerPane(id, failTasks)
}

accumulator <- makeAccum(it)

tryCatch(accumulator(results, seq(along = results)), error = function(e){
cat('error calling combine function:\n')
print(e)
})

# check for errors
errorValue <- getErrorValue(it)
errorIndex <- getErrorIndex(it)

print(sprintf("Number of errors: %i", numberOfFailedTasks))

deleteJob(id)

if (identical(obj$errorHandling, 'stop') && !is.null(errorValue)) {
msg <- sprintf('task %d failed - "%s"', errorIndex,
conditionMessage(errorValue))
stop(simpleError(msg, call=expr))
}
else {
getResult(it)
if(typeof(cloudCombine) == "list" && enableCloudCombine){
response <- downloadBlob(id, paste0("result/", id, "-merge-result.rds"), sasToken = sasToken, accountName = storageCredentials$name)
tempFile <- tempfile("doAzureParallel", fileext = ".rds")
bin <- content(response, "raw")
writeBin(bin, tempFile)
results <- readRDS(tempFile)

failTasks <- sapply(results, .isError)

numberOfFailedTasks <- sum(unlist(failTasks))

if(numberOfFailedTasks > 0){
.createErrorViewerPane(id, failTasks)
}

accumulator <- makeAccum(it)

tryCatch(accumulator(results, seq(along = results)), error = function(e){
cat('error calling combine function:\n')
print(e)
})

# check for errors
errorValue <- getErrorValue(it)
errorIndex <- getErrorIndex(it)

print(sprintf("Number of errors: %i", numberOfFailedTasks))

deleteJob(id)

if (identical(obj$errorHandling, 'stop') && !is.null(errorValue)) {
msg <- sprintf('task %d failed - "%s"', errorIndex,
conditionMessage(errorValue))
stop(simpleError(msg, call=expr))
}
else {
getResult(it)
}
}
}
else{
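A small hedged illustration of the defaults introduced above (names taken from the diff): with no user option set, the new guard still selects the original merge-download path.

# Defaults from the first hunk: cloudCombine is an empty list and
# enableCloudCombine is TRUE unless the caller overrides it.
cloudCombine <- list()
enableCloudCombine <- TRUE

# Guard from the wait branch above: TRUE by default, so the merged
# result blob ("result/<id>-merge-result.rds") is still downloaded.
typeof(cloudCombine) == "list" && enableCloudCombine
#> [1] TRUE

# With enableCloudCombine = FALSE, cloudCombine is set to NULL and the
# guard evaluates FALSE, so the merged result is not fetched.
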
7 changes: 6 additions & 1 deletion R/helpers.R
@@ -5,12 +5,17 @@
.doAzureBatchGlobals <- args$envir
argsList <- args$args
dependsOn <- args$dependsOn
cloudCombine <- args$cloudCombine
userOutputFiles <- args$outputFiles

if (!is.null(argsList)) {
assign('argsList', argsList, .doAzureBatchGlobals)
}

if (!is.null(cloudCombine)) {
assign('cloudCombine', cloudCombine, .doAzureBatchGlobals)
}

envFile <- paste0(taskId, ".rds")
saveRDS(argsList, file = envFile)
rAzureBatch::uploadBlob(jobId, paste0(getwd(), "/", envFile))
@@ -88,7 +93,7 @@
)

outputFiles <- append(outputFiles, userOutputFiles)
commands <- c("export PATH=/anaconda/envs/py35/bin:$PATH", downloadCommand, rCommand)# downloadCommand, , logsCommand, autoUploadCommand, stderrUploadCommand)
commands <- c("export PATH=/anaconda/envs/py35/bin:$PATH", downloadCommand, rCommand)

commands <- linuxWrapCommands(commands)
