Skip to content

Commit

Permalink
Add tests (#25)
Browse files Browse the repository at this point in the history
* Add tests

* [fix] Fix the error when no NetCDF files found in `summary_database()`

* [fix] Overwrite existing temp EPW file if necessary in `match_coord()`

* [fix] Fix the wrong warning messages in `morphing_epw()`

* [fix] Set alpha to zero if any missing values found for combined method

* [refactor] Remove unused functions

* [test] Add tests

* [check] Add mockery and withr to Suggests

* [check] Add curl package to Suggests

* [CI] Use cache

* [check] Fix the error when getting cache dir

* Show cache contents for debug

* Fix

* Fix

* [CI] Skip cache on Windows

* [CI] Update code coverage action

* [CI] Fix cache

* [CI] Make sure NetCDF files are always downloaded before running tests

* [CI] Restore cache in GitHub Actions

* Remove testthat start-first config since it works only with 3rd ed.

* [CI] Set NOT_CRAN to `true` on macOS

* [CI] Remove CMD check on macOS since code coverage runs on macOS

* Bump dev version
  • Loading branch information
hongyuanjia authored Nov 22, 2021
1 parent 86f058a commit e656b9a
Show file tree
Hide file tree
Showing 16 changed files with 1,169 additions and 113 deletions.
16 changes: 15 additions & 1 deletion .github/workflows/R-CMD-check.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ jobs:
matrix:
config:
- {os: windows-latest, r: 'release', not_cran: false}
- {os: macOS-latest, r: 'release', not_cran: false}
- {os: ubuntu-20.04, r: 'release', not_cran: true, rspm: "https://packagemanager.rstudio.com/cran/__linux__/focal/latest"}

env:
Expand Down Expand Up @@ -69,6 +68,21 @@ jobs:
remotes::install_cran("rcmdcheck")
shell: Rscript {0}

- name: Set EPWSHIFTR_CHECK_CACHE
if: runner.os != 'Windows'
shell: bash
run: |
echo "EPWSHIFTR_CHECK_CACHE=${{ runner.temp }}/epwshiftr" >> $GITHUB_ENV
if [ ! -d "${{ runner.temp }}/epwshiftr" ]; then mkdir -p "${{ runner.temp }}/epwshiftr"; fi
- name: Cache NetCDF files
if: runner.os != 'Windows'
uses: actions/cache@v2
with:
path: ${{ env.EPWSHIFTR_CHECK_CACHE }}
key: ${{ runner.os }}-${{ hashFiles('.github/R-version') }}-1-${{ hashFiles('DESCRIPTION') }}
restore-keys: ${{ runner.os }}-${{ hashFiles('.github/R-version') }}-1-

- name: Check
env:
_R_CHECK_CRAN_INCOMING_REMOTE_: false
Expand Down
13 changes: 13 additions & 0 deletions .github/workflows/test-coverage.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,19 @@ jobs:
remotes::install_cran("covr")
shell: Rscript {0}

- name: Set EPWSHIFTR_CHECK_CACHE
shell: bash
run: |
echo "EPWSHIFTR_CHECK_CACHE=${{ runner.temp }}/epwshiftr" >> $GITHUB_ENV
if [ ! -d "${{ runner.temp }}/epwshiftr" ]; then mkdir -p "${{ runner.temp }}/epwshiftr"; fi
- name: Cache NetCDF files
uses: actions/cache@v2
with:
path: ${{ env.EPWSHIFTR_CHECK_CACHE }}
key: ${{ runner.os }}-${{ hashFiles('.github/R-version') }}-1-${{ hashFiles('DESCRIPTION') }}
restore-keys: ${{ runner.os }}-${{ hashFiles('.github/R-version') }}-1-

- name: Test coverage
run: covr::codecov(quiet = FALSE)
shell: Rscript {0}
15 changes: 9 additions & 6 deletions DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
Package: epwshiftr
Title: Create Future 'EnergyPlus' Weather Files using 'CMIP6' Data
Version: 0.1.3.9000
Version: 0.1.3.9001
Authors@R: c(
person(given = "Hongyuan",
family = "Jia",
Expand Down Expand Up @@ -33,7 +33,10 @@ Imports:
RNetCDF,
units
Suggests:
testthat,
testthat (>= 3.0.0),
curl,
mockery,
withr,
pingr,
knitr,
rmarkdown
Expand All @@ -42,12 +45,12 @@ Encoding: UTF-8
URL: https://github.com/ideas-lab-nus/epwshiftr
BugReports: https://github.com/ideas-lab-nus/epwshiftr/issues
Roxygen: list(markdown = TRUE)
RoxygenNote: 7.1.1
RoxygenNote: 7.1.2
Collate:
'coord.R'
'utils.R'
'epwshiftr-package.R'
'utils.R'
'esgf.R'
'morph.R'
'netcdf.R'
'coord.R'
'morph.R'
VignetteBuilder: knitr
12 changes: 12 additions & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,18 @@
data are split by the `by` argument and also the generated future EPWs and
their paths are returned (#18).

## Bug fixes

* Fix the error in `summary_database()` when no NetCDF files are found in the
input directory (#25).
* Fix the error about overwriting the temporary EPW file when `epw` in
`match_coord()` is a search string (#25).
* Now `morphing_epw()` can correctly fall back to use "Shift" method when any
missing values are detected in maximum and minimum prediction values of
climate variables (#25).
* Fix the wrong warning messages when `combined` method is used in
`morphing_epw()` (#25).

# epwshiftr 0.1.3

## Minor changes
Expand Down
2 changes: 1 addition & 1 deletion R/coord.R
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ match_coord <- function (epw, threshold = list(lon = 1.0, lat = 1.0), max_num =
dict <- extract_location_dict(epw)
if (is.null(dict)) return(invisible())
epw <- eplusr::read_epw(dict$epw_url)
epw$save(file.path(tempdir(), basename(dict$epw_url)))
epw$save(file.path(tempdir(), basename(dict$epw_url)), overwrite = TRUE)
}
}

Expand Down
2 changes: 2 additions & 0 deletions R/epwshiftr-package.R
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
EPWSHIFTR_ENV <- new.env(parent = emptyenv())
EPWSHIFTR_ENV$index_db <- NULL

# nocov start
# set package options
# reference: https://github.com/Rdatatable/data.table/blob/master/R/onLoad.R
.onLoad <- function (libname, pkgname) {
Expand All @@ -31,3 +32,4 @@ EPWSHIFTR_ENV$index_db <- NULL
}
invisible()
}
# nocov end
97 changes: 60 additions & 37 deletions R/esgf.R
Original file line number Diff line number Diff line change
Expand Up @@ -281,9 +281,11 @@ esgf_query <- function (

q <- tryCatch(jsonlite::read_json(q), warning = function (w) w, error = function (e) e)

# nocov start
if (inherits(q, "warning") || inherits(q, "error")) {
message("No matched data. Please check network connection and the availability of LLNL ESGF node.")
dt <- data.table::data.table()
# nocov end
} else if (q$response$numFound == 0L) {
message("No matched data. Please examine the actual response using 'attr(x, \"response\")'.")
dt <- data.table::data.table()
Expand Down Expand Up @@ -325,10 +327,12 @@ extract_query_file <- function (q) {
"variable_long_name", "variable_units", "data_node", "size", "url",
"tracking_id")]
l$url <- grep("HTTPServer", unlist(l$url), fixed = TRUE, value = TRUE)
# nocov start
if (!length(l$url)) {
warning("Dataset with id '", l$id, "' does not have a HTTPServer download method.")
l$url <- NA_character_
}
# nocov end
lapply(l, unlist)
}))

Expand Down Expand Up @@ -433,6 +437,7 @@ init_cmip6_index <- function (

if (!nrow(qd)) return(qd)

# nocov start
# give a warning if dataset query response hits the limits
if (nrow(qd) == 10000L) {
warning("The dataset query returns 10,000 results which ",
Expand All @@ -441,6 +446,7 @@ init_cmip6_index <- function (
"It is suggested to examine and refine your query."
)
}
# nocov end

dt <- data.table::set(qd, NULL, "file_url", NA_character_)

Expand Down Expand Up @@ -487,11 +493,13 @@ init_cmip6_index <- function (
}

verbose("Checking if data is complete")
# nocov start
if (anyNA(dt$file_url)) {
warning("There are still ", length(unique(dt$dataset_id[is.na(dt$file_url)])), " Dataset that ",
"did not find any matched output file after ", retry, " retries."
)
}
# nocov end

data.table::setcolorder(dt, c(
"file_id", "dataset_id", "mip_era", "activity_drs", "institution_id",
Expand Down Expand Up @@ -559,53 +567,58 @@ load_cmip6_index <- function (force = FALSE) {
stop(sprintf("CMIP6 experiment output file index does not exists. You may want to create one using 'init_cmip6_index()'."))
}

# nocov start
# load file info
idx <- tryCatch(
data.table::fread(f, colClasses = c("version" = "character", "file_size" = "double")),
warning = function(w) {
stop("Failed to parse CMIP6 experiment output file index. ", conditionMessage(w))
},
error = function (e) {
stop("Failed to parse CMIP6 experiment output file index.\n", conditionMessage(e))
stop("Failed to parse CMIP6 experiment output file index. ", conditionMessage(e))
}
)
message("Loading CMIP6 experiment output file index created at ", file.info(f)$mtime, ".")
# nocov end
verbose("Loading CMIP6 experiment output file index created at ", as.character(file.info(f)$mtime), ".")
}

# fix column types in case of empty values
if ("file_path" %in% names(idx)) {
data.table::set(idx, NULL, "file_path", as.character(idx$file_path))
idx[J(""), on = "file_path", file_path := NA_character_]
}
if ("file_realsize" %in% names(idx)) {
data.table::set(idx, NULL, "file_realsize", as.numeric(idx$file_realsize))
}
if ("file_mtime" %in% names(idx)) {
# to avoid No visible binding for global variable check NOTE
file_mtim <- NULL
if (is.character(idx$file_mtime)) {
idx[J(""), on = "file_mtime", file_mtime := NA]
}
idx[, file_mtime := as.POSIXct(file_mtime, origin = "1970-01-01", Sys.timezone())]
}
if ("time_units" %in% names(idx)) {
data.table::set(idx, NULL, "time_units", as.character(idx$time_units))
}
if ("time_calendar" %in% names(idx)) {
data.table::set(idx, NULL, "time_calendar", as.character(idx$time_calendar))
# fix column types in case of empty values
if ("file_path" %in% names(idx)) {
data.table::set(idx, NULL, "file_path", as.character(idx$file_path))
idx[J(""), on = "file_path", file_path := NA_character_]
}
if ("file_realsize" %in% names(idx)) {
data.table::set(idx, NULL, "file_realsize", as.numeric(idx$file_realsize))
}
if ("file_mtime" %in% names(idx)) {
# to avoid No visible binding for global variable check NOTE
file_mtime <- NULL
if (is.character(idx$file_mtime)) {
idx[J(""), on = "file_mtime", file_mtime := NA]
}
if ("datetime_start" %in% names(idx)) {
# to avoid No visible binding for global variable check NOTE
datetime_start <- NULL
if (is.character(idx$datetime_start)) {
idx[J(""), on = "datetime_start", datetime_start := NA]
}
data.table::set(idx, NULL, "datetime_start", as.POSIXct(idx$datetime_start, "UTC"))
idx[, file_mtime := setattr(as.POSIXct(file_mtime, origin = "1970-01-01"), "tzone", NULL)]
}
if ("time_units" %in% names(idx)) {
data.table::set(idx, NULL, "time_units", as.character(idx$time_units))
}
if ("time_calendar" %in% names(idx)) {
data.table::set(idx, NULL, "time_calendar", as.character(idx$time_calendar))
}
if ("datetime_start" %in% names(idx)) {
# to avoid No visible binding for global variable check NOTE
datetime_start <- NULL
if (is.character(idx$datetime_start)) {
idx[J(""), on = "datetime_start", datetime_start := NA]
}
if ("datetime_end" %in% names(idx)) {
# to avoid No visible binding for global variable check NOTE
datetime_end <- NULL
if (is.character(idx$datetime_end)) {
idx[J(""), on = "datetime_end", datetime_end := NA]
}
data.table::set(idx, NULL, "datetime_end", as.POSIXct(idx$datetime_end, "UTC"))
data.table::set(idx, NULL, "datetime_start", as.POSIXct(idx$datetime_start, "UTC"))
}
if ("datetime_end" %in% names(idx)) {
# to avoid No visible binding for global variable check NOTE
datetime_end <- NULL
if (is.character(idx$datetime_end)) {
idx[J(""), on = "datetime_end", datetime_end := NA]
}
data.table::set(idx, NULL, "datetime_end", as.POSIXct(idx$datetime_end, "UTC"))
}

# update package internal stored file index
Expand Down Expand Up @@ -708,11 +721,15 @@ get_data_node <- function (speed_test = FALSE, timeout = 3) {

# locate table
l_s <- grep("<!--load block main-->", l, fixed = TRUE)
# nocov start
if (!length(l_s)) stop("Internal Error: Failed to read data node table")
# nocov end
l <- l[l_s:length(l)]
l_s <- grep("<table>", l, fixed = TRUE)[1L]
l_e <- grep("</table>", l, fixed = TRUE)[1L]
# nocov start
if (!length(l_s) || !length(l_e)) stop("Internal Error: Failed to read data node table")
# nocov end
l <- l[l_s:l_e]

# extract nodes
Expand All @@ -731,22 +748,28 @@ get_data_node <- function (speed_test = FALSE, timeout = 3) {
}, NA_character_)
status <- status[!is.na(status)]

# nocov start
if (length(nodes) != length(status)) stop("Internal Error: Failed to read data node table")
# nocov end
res <- data.table::data.table(data_node = nodes, status = status)
data.table::setorderv(res, "status", -1)

if (!speed_test) return(res)

# nocov start
if (!requireNamespace("pingr", quietly = TRUE)) {
stop("'epwshiftr' relies on the package 'pingr' to perform speed test",
"please add this to your library with install.packages('pingr') and try again."
)
}
# nocov end

# nocov start
if (!length(nodes_up <- res[status == "UP", data_node])) {
message("No working data nodes available now. Skip speed test")
return(res)
}
# nocov end

# use the pingr package to test the connection speed
speed <- vapply(nodes_up, function (node) {
Expand Down
Loading

0 comments on commit e656b9a

Please sign in to comment.