diff --git a/DESCRIPTION b/DESCRIPTION index b848e806..e1252097 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,16 +1,17 @@ Package: amadeus Title: Accessing and Analyzing Large-Scale Environmental Data -Version: 1.1.1 +Version: 1.1.6 Authors@R: c( person(given = "Mitchell", family = "Manware", role = c("aut", "ctb"), comment = c(ORCID = "0009-0003-6440-6106")), person(given = "Insang", family = "Song", role = c("aut", "ctb"), comment = c(ORCID = "0000-0001-8732-3256")), person(given = "Eva", family = "Marques", role = c("aut", "ctb"), comment = c(ORCID = "0000-0001-9817-6546")), person(given = "Mariana", family = "Alifa Kassien", role = c("aut", "ctb"), comment = c(ORCID = "0000-0003-2295-406X")), + person(given = "Elizabeth", family = "Scholl", role = c("ctb"), comment = c(ORCID = "0000-0003-2727-1954")), person(given = "Kyle", family = "Messier", role = c("aut", "cre"), email = "kyle.messier@nih.gov", comment = c(ORCID = "0000-0001-9508-9623")), person("Spatiotemporal Exposures and Toxicology Group", role = c("cph")) ) Maintainer: Kyle Messier -Description: Functions are designed to facilitate access to and utility with large scale, publicly available environmental data in R. The package contains functions for downloading raw data files from web URLs (download_data()), processing the raw data files into clean spatial objects (process_covariates()), and extracting values from the spatial data objects at point and polygon locations (calc_covariates()). These functions call a series of source-specific functions which are tailored to each data sources/datasets particular URL structure, data format, and spatial/temporal resolution. The functions are tested, versioned, and open source and open access. For calc_sedc() method details, see Messier, Akita, and Serre (2012) . +Description: Functions are designed to facilitate access to and utility with large scale, publicly available environmental data in R. The package contains functions for downloading raw data files from web URLs (download_data()), processing the raw data files into clean spatial objects (process_covariates()), and extracting values from the spatial data objects at point and polygon locations (calculate_covariates()). These functions call a series of source-specific functions which are tailored to each data sources/datasets particular URL structure, data format, and spatial/temporal resolution. The functions are tested, versioned, and open source and open access. For sum_edc() method details, see Messier, Akita, and Serre (2012) . 
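# A minimal sketch of the renamed workflow referenced in the Description field
# above: download_data() fetches raw files, process_covariates() cleans them,
# and calculate_covariates() extracts values at locations. The "narr"/"weasd"
# dataset choice, dates, and paths are illustrative assumptions, and the calls
# are not meant to run without the downloaded files.
library(amadeus)
loc <- data.frame(id = "001", lon = -78.90, lat = 35.97)
download_data(
  dataset_name = "narr",
  variables = "weasd",
  year = 2023,
  directory_to_save = tempdir(),
  acknowledgement = TRUE,
  download = FALSE,   # set TRUE to run the generated download commands
  remove_command = TRUE
)
narr <- process_covariates(
  covariate = "narr",
  date = c("2023-01-01", "2023-01-02"),
  variable = "weasd",
  path = file.path(tempdir(), "weasd")   # hypothetical path to the saved files
)
calculate_covariates(
  covariate = "narr",
  from = narr,
  locs = loc,
  locs_id = "id",
  radius = 0,
  geom = FALSE   # or "sf" / "terra" to keep geometry
)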
Depends: R (>= 4.1.0) Imports: dplyr, sf, sftime, stats, terra, methods, data.table, httr, rvest, exactextractr, utils, stringi, testthat (>= 3.0.0), parallelly, stars, future, future.apply, tidyr, rlang, nhdplusTools, archive, collapse, Rdpack Suggests: diff --git a/NAMESPACE b/NAMESPACE index 6c375f51..44e43b2f 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -3,33 +3,34 @@ export(apply_extent) export(as_mysftime) export(calc_check_time) -export(calc_covariates) -export(calc_ecoregion) -export(calc_geos) -export(calc_gmted) -export(calc_gridmet) -export(calc_hms) -export(calc_koppen_geiger) -export(calc_lagged) -export(calc_merra2) export(calc_message) -export(calc_modis_daily) -export(calc_modis_par) -export(calc_narr) -export(calc_nei) -export(calc_nlcd) export(calc_prepare_locs) export(calc_return_locs) -export(calc_sedac_groads) -export(calc_sedac_population) -export(calc_sedc) export(calc_setcolumns) -export(calc_temporal_dummies) -export(calc_terraclimate) export(calc_time) -export(calc_tri) export(calc_worker) +export(calculate_covariates) +export(calculate_ecoregion) +export(calculate_geos) +export(calculate_gmted) +export(calculate_gridmet) +export(calculate_hms) +export(calculate_koppen_geiger) +export(calculate_lagged) +export(calculate_merra2) +export(calculate_modis_daily) +export(calculate_modis_par) +export(calculate_narr) +export(calculate_nei) +export(calculate_nlcd) +export(calculate_sedac_groads) +export(calculate_sedac_population) +export(calculate_temporal_dummies) +export(calculate_terraclimate) +export(calculate_tri) +export(check_destfile) export(check_for_null_parameters) +export(check_geom) export(check_mysf) export(check_mysftime) export(check_url_status) @@ -117,6 +118,7 @@ export(sftime_as_spatvector) export(spatraster_as_sftime) export(spatrds_as_sftime) export(spatvector_as_sftime) +export(sum_edc) export(test_download_functions) import(rvest) import(sf) diff --git a/R/calculate_covariates.R b/R/calculate_covariates.R index 600b5b61..27e46663 100644 --- a/R/calculate_covariates.R +++ b/R/calculate_covariates.R @@ -18,22 +18,22 @@ #' function. #' @note `covariate` argument value is converted to lowercase. 
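# Per the NAMESPACE changes above, the calc_*() extractors are exported as
# calculate_*(), and the `covariate` keyword is lowercased before dispatch, so
# keyword case does not matter. A brief sketch; `hms` and `loc` are assumed to
# come from the process_hms() example and a user-supplied site table.
calculate_covariates(covariate = "HMS", from = hms, locs = loc, locs_id = "id")
# dispatches to the same routine as
calculate_covariates(covariate = "hms", from = hms, locs = loc, locs_id = "id")
# and replaces the former calc_covariates(covariate = "hms", ...) call.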
#' @seealso -#' * \code{\link{calc_modis_par}}: "modis", "MODIS" -#' * \code{\link{calc_koppen_geiger}}: "koppen-geiger", "koeppen-geiger", "koppen" -#' * \code{\link{calc_ecoregion}}: "ecoregion", "ecoregions" -#' * \code{\link{calc_temporal_dummies}}: "dummies", "Dummies" -#' * \code{\link{calc_hms}}: "hms", "smoke", "HMS" -#' * \code{\link{calc_gmted}}: "gmted", "GMTED" -#' * \code{\link{calc_narr}}: "narr", "NARR" -#' * \code{\link{calc_geos}}: "geos", "geos_cf", "GEOS" -#' * \code{\link{calc_sedac_population}}: "population", "sedac_population" -#' * \code{\link{calc_sedac_groads}}: "roads", "groads", "sedac_groads" -#' * \code{\link{calc_nlcd}}: "nlcd", "NLCD" -#' * \code{\link{calc_tri}}: "tri", "TRI" -#' * \code{\link{calc_nei}}: "nei", "NEI" -#' * \code{\link{calc_merra2}}: "merra", "MERRA", "merra2", "MERRA2" -#' * \code{\link{calc_gridmet}}: "gridMET", "gridmet" -#' * \code{\link{calc_terraclimate}}: "terraclimate", "TerraClimate" +#' * \code{\link{calculate_modis_par}}: "modis", "MODIS" +#' * \code{\link{calculate_koppen_geiger}}: "koppen-geiger", "koeppen-geiger", "koppen" +#' * \code{\link{calculate_ecoregion}}: "ecoregion", "ecoregions" +#' * \code{\link{calculate_temporal_dummies}}: "dummies", "Dummies" +#' * \code{\link{calculate_hms}}: "hms", "smoke", "HMS" +#' * \code{\link{calculate_gmted}}: "gmted", "GMTED" +#' * \code{\link{calculate_narr}}: "narr", "NARR" +#' * \code{\link{calculate_geos}}: "geos", "geos_cf", "GEOS" +#' * \code{\link{calculate_sedac_population}}: "population", "sedac_population" +#' * \code{\link{calculate_sedac_groads}}: "roads", "groads", "sedac_groads" +#' * \code{\link{calculate_nlcd}}: "nlcd", "NLCD" +#' * \code{\link{calculate_tri}}: "tri", "TRI" +#' * \code{\link{calculate_nei}}: "nei", "NEI" +#' * \code{\link{calculate_merra2}}: "merra", "MERRA", "merra2", "MERRA2" +#' * \code{\link{calculate_gridmet}}: "gridMET", "gridmet" +#' * \code{\link{calculate_terraclimate}}: "terraclimate", "TerraClimate" #' @return Calculated covariates as a data.frame or SpatVector object #' @author Insang Song #' @examples @@ -41,7 +41,7 @@ #' ## amount of data which is not included in the package. 
#' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_covariates( +#' calculate_covariates( #' covariate = "narr", #' from = narr, # derived from process_covariates() example #' locs = loc, @@ -51,7 +51,7 @@ #' } #' @export # nolint end -calc_covariates <- +calculate_covariates <- function( covariate = c("modis", "koppen-geiger", "koeppen-geiger", "koppen", "koeppen", @@ -75,28 +75,28 @@ calc_covariates <- # select function to run what_to_run <- switch(covariate, - modis = calc_modis_par, - ecoregion = calc_ecoregion, - ecoregions = calc_ecoregion, - koppen = calc_koppen_geiger, - narr = calc_narr, - nlcd = calc_nlcd, - smoke = calc_hms, - hms = calc_hms, - sedac_groads = calc_sedac_groads, - roads = calc_sedac_groads, - groads = calc_sedac_groads, - sedac_population = calc_sedac_population, - population = calc_sedac_population, - nei = calc_nei, - tri = calc_tri, - geos = calc_geos, - gmted = calc_gmted, - dummies = calc_temporal_dummies, - merra = calc_merra2, - merra2 = calc_merra2, - gridmet = calc_gridmet, - terraclimate = calc_terraclimate + modis = calculate_modis_par, + ecoregion = calculate_ecoregion, + ecoregions = calculate_ecoregion, + koppen = calculate_koppen_geiger, + narr = calculate_narr, + nlcd = calculate_nlcd, + smoke = calculate_hms, + hms = calculate_hms, + sedac_groads = calculate_sedac_groads, + roads = calculate_sedac_groads, + groads = calculate_sedac_groads, + sedac_population = calculate_sedac_population, + population = calculate_sedac_population, + nei = calculate_nei, + tri = calculate_tri, + geos = calculate_geos, + gmted = calculate_gmted, + dummies = calculate_temporal_dummies, + merra = calculate_merra2, + merra2 = calculate_merra2, + gridmet = calculate_gridmet, + terraclimate = calculate_terraclimate ) res_covariate <- @@ -134,9 +134,9 @@ calc_covariates <- #' @param locs sf/SpatVector. Unique locs. Should include #' a unique identifier field named `locs_id` #' @param locs_id character(1). Name of unique identifier. -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Placeholders. #' @seealso [`process_koppen_geiger`] #' @return a data.frame or SpatVector object @@ -158,7 +158,7 @@ calc_covariates <- #' ## amount of data which is not included in the package. 
#' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_koppen_geiger( +#' calculate_koppen_geiger( #' from = kg, # derived from process_koppen_geiger() example #' locs = loc, #' locs_id = "id", @@ -167,7 +167,7 @@ calc_covariates <- #' } #' @export # locs (locs), from (from), locs_id (id_col), variables -calc_koppen_geiger <- +calculate_koppen_geiger <- function( from = NULL, locs = NULL, @@ -202,7 +202,7 @@ calc_koppen_geiger <- ) locs_kg_extract[[locs_id]] <- locs_df[, 1] - if (geom) { + if (geom %in% c("sf", "terra")) { locs_kg_extract$geometry <- locs_df[, 2] } colnames(locs_kg_extract)[2] <- "value" @@ -247,7 +247,7 @@ calc_koppen_geiger <- df_ae_separated ) names(kg_extracted)[1] <- locs_id - if (geom) { + if (geom %in% c("sf", "terra")) { names(kg_extracted)[2:3] <- c("geometry", "description") sites_return <- calc_return_locs( covar = kg_extracted, @@ -282,9 +282,9 @@ calc_koppen_geiger <- #' Maximum possible value is `2^31 - 1`. Only valid when #' `mode = "exact"`. #' See [`exactextractr::exact_extract`] for details. -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param nthreads integer(1). Number of threads to be used #' @param ... Placeholders. #' @note NLCD is available in U.S. only. Users should be aware of @@ -317,7 +317,7 @@ calc_koppen_geiger <- #' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_nlcd( +#' calculate_nlcd( #' from = nlcd, # derived from process_nlcd() example #' locs = loc, #' locs_id = "id", @@ -326,15 +326,17 @@ calc_koppen_geiger <- #' ) #' } #' @export -calc_nlcd <- function(from, - locs, - locs_id = "site_id", - mode = c("exact", "terra"), - radius = 1000, - max_cells = 5e7, - geom = FALSE, - nthreads = 1L, - ...) { +calculate_nlcd <- function( + from, + locs, + locs_id = "site_id", + mode = c("exact", "terra"), + radius = 1000, + max_cells = 5e7, + geom = FALSE, + nthreads = 1L, + ... +) { # check inputs mode <- match.arg(mode) if (!is.numeric(radius)) { @@ -439,7 +441,7 @@ calc_nlcd <- function(from, # merge locs_df with nlcd class fractions new_data_vect <- cbind(locs_df, as.integer(year), nlcd_at_bufs) - if (geom) { + if (geom %in% c("sf", "terra")) { names(new_data_vect)[1:3] <- c(locs_id, "geometry", "time") } else { names(new_data_vect)[1:2] <- c(locs_id, "time") @@ -466,9 +468,9 @@ calc_nlcd <- function(from, #' @param locs sf/SpatVector. Unique locs. Should include #' a unique identifier field named `locs_id` #' @param locs_id character(1). Name of unique identifier. -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Placeholders. 
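# A sketch of the `geom` options documented above for calculate_ecoregion();
# `ecoregion` and `loc` are assumed to come from the process_ecoregion()
# example and a user-supplied site table. With geom = "sf" or "terra" the
# result is expected to carry geometry (sf or SpatVector, in the CRS of
# `from`); with the default FALSE a plain data.frame is returned.
calculate_ecoregion(from = ecoregion, locs = loc, locs_id = "id", geom = FALSE)
calculate_ecoregion(from = ecoregion, locs = loc, locs_id = "id", geom = "sf")
calculate_ecoregion(from = ecoregion, locs = loc, locs_id = "id", geom = "terra")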
#' @seealso [`process_ecoregion`] #' @return a data.frame or SpatVector object object with dummy variables and @@ -483,7 +485,7 @@ calc_nlcd <- function(from, #' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_ecoregion( +#' calculate_ecoregion( #' from = ecoregion, # derived from process_ecoregion() example #' locs = loc, #' locs_id = "id", @@ -491,7 +493,7 @@ calc_nlcd <- function(from, #' ) #' } #' @export -calc_ecoregion <- +calculate_ecoregion <- function( from = NULL, locs, @@ -580,9 +582,9 @@ calc_ecoregion <- #' @param max_cells integer(1). Maximum number of cells to be read at once. #' Higher values will expedite processing, but will increase memory usage. #' Maximum possible value is `2^31 - 1`. -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' See [`exactextractr::exact_extract`] for details. #' @param ... Placeholders. #' @description The function operates at MODIS/VIIRS products @@ -596,7 +598,7 @@ calc_ecoregion <- #' @seealso #' * Preprocessing: [process_modis_merge()], [process_modis_swath()], #' [process_blackmarble()] -#' * Parallelization: [calc_modis_par()] +#' * Parallelization: [calculate_modis_par()] #' @author Insang Song #' @return a data.frame or SpatVector object. #' @importFrom terra extract @@ -612,7 +614,7 @@ calc_ecoregion <- #' ## amount of data which is not included in the package. #' \dontrun{ #' locs <- data.frame(lon = -78.8277, lat = 35.95013, id = "001") -#' calc_modis_daily( +#' calculate_modis_daily( #' from = mod06l2_warp, # dervied from process_modis() example #' locs = locs, #' locs_id = "id", @@ -624,7 +626,7 @@ calc_ecoregion <- #' ) #' } #' @export -calc_modis_daily <- function( +calculate_modis_daily <- function( from = NULL, locs = NULL, locs_id = "site_id", @@ -694,7 +696,8 @@ calc_modis_daily <- function( name_range <- seq(ncol(extracted) - name_offset + 1, ncol(extracted), 1) colnames(extracted)[name_range] <- name_extracted extracted$time <- as.POSIXlt(date) - if (geom) { + check_geom(geom) + if (geom %in% c("sf", "terra")) { # convert to base date, as terra::vect does not like class "POSIXlt" extracted$time <- as.Date.POSIXlt(extracted$time) # location ID with geometry @@ -759,11 +762,12 @@ calc_modis_daily <- function( #' Higher values will expedite processing, but will increase memory usage. #' Maximum possible value is `2^31 - 1`. #' See [`exactextractr::exact_extract`] for details. -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Arguments passed to `preprocess`. -#' @description `calc_modis_par` essentially runs [`calc_modis_daily`] function +# nolint start +#' @description `calculate_modis_par` essentially runs [`calculate_modis_daily`] function #' in each thread (subprocess). 
Based on daily resolution, each day's workload #' will be distributed to each thread. With `product` argument, #' the files are processed by a customized function where the unique structure @@ -771,6 +775,7 @@ calc_modis_daily <- function( #' argument should be carefully selected in consideration of the machine's #' CPU and memory capacities as products have their own memory pressure. #' `locs` should be `sf` object as it is exportable to parallel workers. +# nolint end #' @note Overall, this function and dependent routines assume that the file #' system can handle concurrent access to the (network) disk by multiple #' processes. File system characteristics, package versions, and hardware @@ -806,7 +811,7 @@ calc_modis_daily <- function( #' #' This function leverages the calculation of single-day MODIS #' covariates: -#' * [`calc_modis_daily()`] +#' * [`calculate_modis_daily()`] #' #' Also, for preprocessing, please refer to: #' * [`process_modis_merge()`] @@ -829,7 +834,7 @@ calc_modis_daily <- function( #' \dontrun{ #' locs <- data.frame(lon = -78.8277, lat = 35.95013, id = "001") #' locs <- terra::vect(locs, geom = c("lon", "lat"), crs = "EPSG:4326") -#' calc_modis_par( +#' calculate_modis_par( #' from = #' list.files("./data", pattern = "VNP46A2.", full.names = TRUE), #' locs = locs, @@ -843,7 +848,7 @@ calc_modis_daily <- function( #' ) #' } #' @export -calc_modis_par <- +calculate_modis_par <- function( from = NULL, locs = NULL, @@ -860,6 +865,7 @@ calc_modis_par <- geom = FALSE, ... ) { + check_geom(geom) if (!is.function(preprocess)) { stop("preprocess should be one of process_modis_merge, process_modis_swath, or process_blackmarble.") @@ -960,7 +966,7 @@ process_modis_swath, or process_blackmarble.") radius[k]) extracted <- try( - calc_modis_daily( + calculate_modis_daily( locs = locs_input, from = vrt_today, locs_id = locs_id, @@ -1000,14 +1006,14 @@ process_modis_swath, or process_blackmarble.") future.seed = TRUE ) calc_results <- do.call(dplyr::bind_rows, calc_results) - if (geom) { + if (geom %in% c("sf", "terra")) { # merge calc_results_return <- merge( - locs, + locs_input, calc_results, by = locs_id ) - if ("sf" %in% class(calc_results_return)) { + if (geom == "terra") { calc_results_return <- terra::vect(calc_results_return) } } else { @@ -1029,9 +1035,9 @@ process_modis_swath, or process_blackmarble.") #' Default is `"site_id"`. #' @param year integer. Year domain to dummify. #' Default is \code{seq(2018L, 2022L)}. -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Placeholders. #' @return a data.frame or SpatVector object #' @author Insang Song @@ -1044,14 +1050,14 @@ process_modis_swath, or process_blackmarble.") #' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_temporal_dummies( +#' calculate_temporal_dummies( #' locs = loc, #' locs_id = "id", #' year = seq(2018L, 2022L) #' ) #' } #' @export -calc_temporal_dummies <- +calculate_temporal_dummies <- function( locs, locs_id = "site_id", @@ -1059,6 +1065,7 @@ calc_temporal_dummies <- geom = FALSE, ... 
) { + check_geom(geom) if (!methods::is(locs, "data.frame")) { stop("Argument locs is not a data.frame.\n") } @@ -1125,17 +1132,18 @@ calc_temporal_dummies <- # nolint start #' Calculate Sum of Exponentially Decaying Contributions (SEDC) covariates -#' @param from `SpatVector` object. Locations where each SEDC is calculated. -#' @param locs `SpatVector` object. Locations where -#' the sum of SEDCs are calculated. +#' @param from `SpatVector`(1). Point locations which contain point-source +#' covariate data. +#' @param locs sf/SpatVector(1). Locations where the sum of exponentially +#' decaying contributions are calculated. #' @param locs_id character(1). Name of the unique id field in `point_to`. #' @param sedc_bandwidth numeric(1). #' Distance at which the source concentration is reduced to #' `exp(-3)` (approximately -95 %) #' @param target_fields character(varying). Field names in characters. -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @return a data.frame (tibble) or SpatVector object with input field names with #' a suffix \code{"_sedc"} where the sums of EDC are stored. #' Additional attributes are attached for the EDC information. @@ -1144,7 +1152,7 @@ calc_temporal_dummies <- #' - `attr(result, "sedc_threshold")``: the threshold distance #' at which emission source points are excluded beyond that #' @note The function is originally from -#' [chopin](https://github.com/NIEHS/chopin) +#' [chopin](https://github.com/ropensci/chopin) #' Distance calculation is done with terra functions internally. #' Thus, the function internally converts sf objects in #' \code{point_*} arguments to terra. @@ -1155,8 +1163,6 @@ calc_temporal_dummies <- #' #' \insertRef{web_sedctutorial_package}{amadeus} #' @examples -#' library(terra) -#' library(sf) #' set.seed(101) #' ncpath <- system.file("gpkg/nc.gpkg", package = "sf") #' nc <- terra::vect(ncpath) @@ -1170,7 +1176,7 @@ calc_temporal_dummies <- #' pnt_from$val2 <- rgamma(10L, 2, 1) #' #' vals <- c("val1", "val2") -#' calc_sedc(pnt_locs, pnt_from, "NAME", 1e4, vals) +#' sum_edc(pnt_locs, pnt_from, "NAME", 1e4, vals) #' @importFrom dplyr as_tibble #' @importFrom dplyr left_join #' @importFrom dplyr summarize @@ -1185,7 +1191,7 @@ calc_temporal_dummies <- #' @importFrom rlang sym #' @export # nolint end -calc_sedc <- +sum_edc <- function( from = NULL, locs = NULL, @@ -1194,6 +1200,7 @@ calc_sedc <- target_fields = NULL, geom = FALSE ) { + check_geom(geom) if (!methods::is(locs, "SpatVector")) { locs <- try(terra::vect(locs)) } @@ -1265,7 +1272,7 @@ The result may not be accurate.\n", names(res_sedc)[idx_air] <- sprintf("%s_%05d", names(res_sedc)[idx_air], sedc_bandwidth) - if (geom) { + if (geom %in% c("sf", "terra")) { res_sedc <- merge( terra::as.data.frame(locs, geom = "WKT")[, c("site_id", "geometry")], res_sedc, @@ -1300,14 +1307,14 @@ The result may not be accurate.\n", #' Default is `"site_id"`. #' @param radius Circular buffer radius. #' Default is \code{c(1000, 10000, 50000)} (meters) -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. 
Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Placeholders. #' @author Insang Song, Mariana Kassien #' @return a data.frame or SpatVector object #' @note U.S. context. -#' @seealso [`calc_sedc`], [`process_tri`] +#' @seealso [`sum_edc`], [`process_tri`] #' @importFrom terra vect #' @importFrom terra crs #' @importFrom terra nearby @@ -1328,7 +1335,7 @@ The result may not be accurate.\n", #' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_tri( +#' calculate_tri( #' from = tri, # derived from process_tri() example #' locs = loc, #' locs_id = "id", @@ -1336,7 +1343,7 @@ The result may not be accurate.\n", #' ) #' } #' @export -calc_tri <- function( +calculate_tri <- function( from = NULL, locs, locs_id = "site_id", @@ -1344,6 +1351,7 @@ calc_tri <- function( geom = FALSE, ... ) { + check_geom(geom) if (!methods::is(locs, "SpatVector")) { if (methods::is(locs, "sf")) { locs <- terra::vect(locs) @@ -1365,7 +1373,7 @@ calc_tri <- function( Map( function(x) { locs_tri_s <- - calc_sedc( + sum_edc( locs = locs_re, from = from, locs_id = locs_id, @@ -1403,9 +1411,9 @@ calc_tri <- function( #' @param locs sf/SpatVector. Locations at NEI values are joined. #' @param locs_id character(1). Unique site identifier column name. #' Unused but kept for compatibility. -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Placeholders. #' @author Insang Song, Ranadeep Daw #' @seealso [`process_nei`] @@ -1419,20 +1427,21 @@ calc_tri <- function( #' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_nei( +#' calculate_nei( #' from = nei, # derived from process_nei example #' locs = loc, #' locs_id = "id" #' ) #' } #' @export -calc_nei <- function( +calculate_nei <- function( from = NULL, locs = NULL, locs_id = "site_id", geom = FALSE, ... ) { + check_geom(geom) if (!methods::is(locs, "SpatVector")) { locs <- try(terra::vect(locs)) if (inherits(locs, "try-error")) { @@ -1467,9 +1476,9 @@ calc_nei <- function( #' containing identifier for each unique coordinate location. #' @param radius integer(1). Circular buffer distance around site locations. #' (Default = 0). -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Placeholders. #' @seealso [process_hms()] #' @author Mitchell Manware @@ -1483,7 +1492,7 @@ calc_nei <- function( #' ## amount of data which is not included in the package. 
#' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_hms( +#' calculate_hms( #' from = hms, # derived from process_hms() example #' locs = loc, #' locs_id = "id", @@ -1492,7 +1501,7 @@ calc_nei <- function( #' ) #' } #' @export -calc_hms <- function( +calculate_hms <- function( from, locs, locs_id = NULL, @@ -1504,6 +1513,7 @@ calc_hms <- function( #### from == character indicates no wildfire smoke plumes are present #### return 0 for all densities, locs and dates if (is.character(from)) { + check_geom(geom) message(paste0( "Inherited list of dates due to absent smoke plume polygons.\n" )) @@ -1636,7 +1646,7 @@ calc_hms <- function( #### define column names colname_common <- c(locs_id, "time", binary_colname) - if (geom) { + if (geom %in% c("sf", "terra")) { sites_extracted <- merge(sites_extracted, sites_id, @@ -1700,9 +1710,9 @@ calc_hms <- function( #' (Default = 0). #' @param fun character(1). Function used to summarize multiple raster cells #' within sites location buffer (Default = `mean`). -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Placeholders #' @author Mitchell Manware #' @seealso [`process_gmted()`] @@ -1718,7 +1728,7 @@ calc_hms <- function( #' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_gmted( +#' calculate_gmted( #' from = gmted, # derived from process_gmted() example #' locs = loc, #' locs_id = "id", @@ -1728,7 +1738,7 @@ calc_hms <- function( #' ) #' } #' @export -calc_gmted <- function( +calculate_gmted <- function( from, locs, locs_id = NULL, @@ -1788,7 +1798,7 @@ calc_gmted <- function( "_", sprintf("%05d", as.integer(radius)) ) - if (geom) { + if (geom %in% c("sf", "terra")) { #### convert integer to numeric sites_extracted[, 4] <- as.numeric(sites_extracted[, 4]) names(sites_extracted) <- c(locs_id, "geometry", "time", variable_name) @@ -1823,9 +1833,9 @@ calc_gmted <- function( #' (Default = 0). #' @param fun character(1). Function used to summarize multiple raster cells #' within sites location buffer (Default = `mean`). -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Placeholders #' @author Mitchell Manware #' @seealso [`process_narr`] @@ -1841,7 +1851,7 @@ calc_gmted <- function( #' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_narr( +#' calculate_narr( #' from = narr, # derived from process_narr() example #' locs = loc, #' locs_id = "id", @@ -1851,7 +1861,7 @@ calc_gmted <- function( #' ) #' } #' @export -calc_narr <- function( +calculate_narr <- function( from, locs, locs_id = NULL, @@ -1917,9 +1927,9 @@ calc_narr <- function( #' (Default = 0). #' @param fun character(1). 
Function used to summarize multiple raster cells #' within sites location buffer (Default = `mean`). -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Placeholders. #' @author Mitchell Manware #' @seealso [process_geos()] @@ -1936,7 +1946,7 @@ calc_narr <- function( #' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_geos( +#' calculate_geos( #' from = geos, # derived from process_geos() example #' locs = loc, #' locs_id = "id", @@ -1946,7 +1956,7 @@ calc_narr <- function( #' ) #' } #' @export -calc_geos <- function( +calculate_geos <- function( from, locs, locs_id = NULL, @@ -2002,9 +2012,9 @@ calc_geos <- function( #' (Default = 0). #' @param fun character(1). Function used to summarize multiple raster cells #' within sites location buffer (Default = `mean`). -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Placeholders #' @author Mitchell Manware #' @seealso [process_sedac_population()] @@ -2015,7 +2025,7 @@ calc_geos <- function( #' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_sedac_population( +#' calculate_sedac_population( #' from = pop, # derived from process_sedac_population() example #' locs = loc, #' locs_id = "id", @@ -2025,7 +2035,7 @@ calc_geos <- function( #' ) #' } #' @export -calc_sedac_population <- function( +calculate_sedac_population <- function( from, locs, locs_id = NULL, @@ -2104,14 +2114,14 @@ calc_sedac_population <- function( #' (Default = 1000). #' @param fun function(1). Function used to summarize the length of roads #' within sites location buffer (Default is `sum`). -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Placeholders. # nolint start #' @note Unit is km / sq km. The returned `data.frame` object contains a #' `$time` column to represent the temporal range covered by the -#' dataset. For more information, see . +#' dataset. For more information, see . # nolint end #' @author Insang Song #' @seealso [`process_sedac_groads`] @@ -2132,7 +2142,7 @@ calc_sedac_population <- function( #' ## amount of data which is not included in the package. 
#' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_sedac_groads( +#' calculate_sedac_groads( #' from = groads, # derived from process_sedac_groads() example #' locs = loc, #' locs_id = "id", @@ -2142,7 +2152,7 @@ calc_sedac_population <- function( #' ) #' } #' @export -calc_sedac_groads <- function( +calculate_sedac_groads <- function( from = NULL, locs = NULL, locs_id = NULL, @@ -2198,7 +2208,7 @@ calc_sedac_groads <- function( ) #### time period from_clip$description <- "1980 - 2010" - if (geom) { + if (geom %in% c("sf", "terra")) { from_clip$geometry <- sites_list[[2]]$geometry from_clip_reorder <- from_clip[, c(1, 5, 4, 2, 3)] } else { @@ -2229,12 +2239,12 @@ calc_sedac_groads <- function( #' (Default = 0). #' @param fun character(1). Function used to summarize multiple raster cells #' within sites location buffer (Default = `mean`). -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Placeholders #' @author Mitchell Manware -#' @seealso [calc_geos()], [process_merra2()] +#' @seealso [calculate_geos()], [process_merra2()] #' @return a data.frame or SpatVector object #' @importFrom terra vect #' @importFrom terra buffer @@ -2248,7 +2258,7 @@ calc_sedac_groads <- function( #' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_merra2( +#' calculate_merra2( #' from = merra2, # derived from process_merra2() example #' locs = loc, #' locs_id = "id", @@ -2258,7 +2268,7 @@ calc_sedac_groads <- function( #' ) #' } #' @export -calc_merra2 <- function( +calculate_merra2 <- function( from, locs, locs_id = NULL, @@ -2321,9 +2331,9 @@ calc_merra2 <- function( #' (Default = 0). #' @param fun character(1). Function used to summarize multiple raster cells #' within sites location buffer (Default = `mean`). -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Placeholders. #' @author Mitchell Manware #' @seealso [`process_gridmet()`] @@ -2339,7 +2349,7 @@ calc_merra2 <- function( #' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_gridmet( +#' calculate_gridmet( #' from = gridmet, # derived from process_gridmet() example #' locs = loc, #' locs_id = "id", @@ -2349,7 +2359,7 @@ calc_merra2 <- function( #' ) #' } #' @export -calc_gridmet <- function( +calculate_gridmet <- function( from, locs, locs_id = NULL, @@ -2404,9 +2414,9 @@ calc_gridmet <- function( #' (Default = 0). #' @param fun character(1). Function used to summarize multiple raster cells #' within sites location buffer (Default = `mean`). -#' @param geom logical(1). Should the function return a `SpatVector`? -#' Default is `FALSE`. 
The coordinate reference system of the `SpatVector` is -#' that of `from.` +#' @param geom FALSE/"sf"/"terra".. Should the function return with geometry? +#' Default is `FALSE`, options with geometry are "sf" or "terra". The +#' coordinate reference system of the `sf` or `SpatVector` is that of `from.` #' @param ... Placeholders. #' @note #' TerraClimate data has monthly temporal resolution, so the `$time` column @@ -2426,7 +2436,7 @@ calc_gridmet <- function( #' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' calc_terraclimate( +#' calculate_terraclimate( #' from = terraclimate, # derived from process_terraclimate() example #' locs = loc, #' locs_id = "id", @@ -2436,7 +2446,7 @@ calc_gridmet <- function( #' ) #' } #' @export -calc_terraclimate <- function( +calculate_terraclimate <- function( from = NULL, locs = NULL, locs_id = NULL, @@ -2480,7 +2490,7 @@ calc_terraclimate <- function( # nolint start #' Calculate temporally lagged covariates #' @description -#' The \code{calc_lagged()} function calculates daily temporal lagged covariates +#' The \code{calculate_lagged()} function calculates daily temporal lagged covariates #' from the output of \code{calculate_covariates()} or \code{calc_*()}. #' @param from data.frame(1). A `data.frame` containing calculated covariates #' returned from \code{calculate_covariates()} or \code{calc_*()}. @@ -2492,15 +2502,15 @@ calc_terraclimate <- function( #' @param geom logical(1). Should the function return a `SpatVector`? #' Default is `FALSE`. The coordinate reference system of the `SpatVector` is #' that of `from.` To return as a `SpatVector`, `from` must also be a `SpatVector` -#' @seealso [calc_covariates()] +#' @seealso [calculate_covariates()] #' @note #' In order to calculate temporally lagged covariates, `from` must contain at #' least the number of lag days before the desired start date. For example, if #' `date = c("2024-01-01", "2024-01-31)` and `lag = 1`, `from` must contain data #' starting at 2023-12-31. -#' If `from` contains geometry features, `calc_lagged` will return a column +#' If `from` contains geometry features, `calculate_lagged` will return a column #' with geometry features of the same name. -#' \code{calc_lagged()} assumes that all columns other than `time_id`, +#' \code{calculate_lagged()} assumes that all columns other than `time_id`, #' `locs_id`, and fixed columns of "lat" and "lon", follow the genre, variable, #' lag, buffer radius format adopted in \code{calc_setcolumns()}. #' @return a `data.frame` object @@ -2511,7 +2521,7 @@ calc_terraclimate <- function( #' ## amount of data which is not included in the package. 
#' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -#' terracliamte_covar <- calc_terraclimate( +#' terracliamte_covar <- calculate_terraclimate( #' from = terraclimate, # derived from process_terraclimate() example #' locs = loc, #' locs_id = "id", @@ -2519,7 +2529,7 @@ calc_terraclimate <- function( #' fun = "mean", #' geom = FALSE #' ) -#' calc_lagged( +#' calculate_lagged( #' from = terracliamte_covar, #' locs_id = "id", #' date = c("2023-01-02", "2023-01-10"), @@ -2529,18 +2539,19 @@ calc_terraclimate <- function( #' } # nolint end #' @export -calc_lagged <- function( +calculate_lagged <- function( from, date, lag, locs_id, time_id = "time", geom = FALSE) { + check_geom(geom) #### check years stopifnot(length(date) == 2) date <- date[order(as.Date(date))] #### geom and from - if (geom && !("SpatVector" %in% class(from))) { + if (geom %in% c("sf", "terra") && !("SpatVector" %in% class(from))) { stop( paste0( "To return with geometry, `from` must be a `SpatVector` object.\n" @@ -2606,7 +2617,7 @@ calc_lagged <- function( #### merge with other locations variables_merge <- rbind(variables_merge, variables_return_date) } - if (geom) { + if (geom %in% c("sf", "terra")) { variables_merge <- merge(variables_merge, geoms) } variables_return <- calc_return_locs( diff --git a/R/calculate_covariates_auxiliary.R b/R/calculate_covariates_auxiliary.R index 3f57f85a..db92210a 100644 --- a/R/calculate_covariates_auxiliary.R +++ b/R/calculate_covariates_auxiliary.R @@ -209,7 +209,8 @@ calc_message <- function( #' @param radius integer(1). Circular buffer distance around site locations. #' (Default = 0). Passed from \code{calc_\*()}. #' @param geom logical(1). Should the geometry of `locs` be returned in the -#' `data.frame`? Default is `FALSE`. +#' `data.frame`? Default is `FALSE`, options "sf" or "terra" will preserve +#' geometry, but will use `terra` for extraction. #' @return A `list` containing `SpatVector` and `data.frame` objects #' @seealso [`process_locs_vector()`], [`check_for_null_parameters()`] #' @keywords internal auxiliary @@ -236,6 +237,9 @@ calc_prepare_locs <- function( radius ) #### site identifiers and geometry + # check geom + check_geom(geom) + if (geom %in% c("sf", "terra")) geom <- TRUE if (geom) { sites_id <- subset( terra::as.data.frame(sites_e, geom = "WKT"), @@ -501,8 +505,8 @@ calc_worker <- function( #' @param POSIXt logical(1). Should the time values in `covar` be of class #' `POSIXt`? If `FALSE`, the time values will be checked for integer class #' (year and year-month). -#' @param geom logical(1). Should `covar` be returned as a -#' `data.frame`? Default is `FALSE`. +#' @param geom FALSE/"sf"/"terra". Should `covar` be returned as a +#' `data.frame`? Default is `FALSE`, options with geometry are "sf" or "terra". #' @param crs terra::crs(1). Coordinate reference system (inherited from #' `from`). #' @importFrom terra vect @@ -523,7 +527,7 @@ calc_return_locs <- function( } # nolint end # if geom, convert to and return SpatVector - if (geom) { + if (geom %in% c("terra", "sf")) { if ("geometry" %in% names(covar)) { covar_return <- terra::vect( covar, @@ -537,8 +541,26 @@ calc_return_locs <- function( crs = crs ) } - return(covar_return) + if (geom == "terra") { + return(covar_return) + } else if (geom == "sf") { + return(sf::st_as_sf(covar_return)) + } } else { return(data.frame(covar)) } } + +#' Check that `geom` value is one of `FALSE`, `"sf"`, or `"terra"` +#' @description Check that `geom` value is one of `FALSE`, `"sf"`, +#' or `"terra"`. 
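# A small sketch of the validation and return behaviour introduced above:
# check_geom() passes silently for the three accepted values and stops
# otherwise, and calc_return_locs() then returns a data.frame, an sf object,
# or a SpatVector depending on `geom`.
check_geom(FALSE)       # no error; invisibly returns NULL
check_geom("sf")        # no error
check_geom("terra")     # no error
try(check_geom(TRUE))   # error: `geom` must be one of FALSE, 'sf', or 'terra'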
+#' @param geom FALSE/"sf"/"terra".' +#' @keywords internal auxiliary +#' @author Mitchell Manware +#' @return NULL; will stop if `geom` is not one of the three options +#' @export +check_geom <- function(geom) { + if (!geom %in% c(FALSE, "sf", "terra")) { + stop("`geom` must be one of FALSE, 'sf', or 'terra'.") + } +} diff --git a/R/download.R b/R/download.R index e040d79d..b214ac64 100644 --- a/R/download.R +++ b/R/download.R @@ -47,6 +47,7 @@ #' sub-directories within \code{directory_to_save}. File format and #' sub-directory names depend on data source and dataset of interest. #' @examples +#' \dontrun{ #' download_data( #' dataset_name = "narr", #' variables = "weasd", @@ -56,6 +57,7 @@ #' download = FALSE, # NOTE: download skipped for examples, #' remove_command = TRUE #' ) +#' } #' @export download_data <- function( @@ -107,11 +109,12 @@ download_data <- prism = download_prism ) - tryCatch( + return <- tryCatch( { what_to_run( directory_to_save = directory_to_save, acknowledgement = acknowledgement, + hash = hash, ... ) }, @@ -128,6 +131,8 @@ download_data <- ) } ) + + return(return) } # nolint start @@ -171,6 +176,7 @@ download_data <- #' @references #' \insertRef{data_usepa2023airdata}{amadeus} #' @examples +#' \dontrun{ #' download_aqs( #' parameter_code = 88101, #' resolution_temporal = "daily", @@ -181,6 +187,7 @@ download_data <- #' remove_command = TRUE, #' unzip = FALSE #' ) +#' } # nolint end #' @export download_aqs <- @@ -254,7 +261,7 @@ download_aqs <- #### filter commands to non-existing files download_commands <- download_commands[ which( - !file.exists(download_names) + !file.exists(download_names) | file.size(download_names) == 0 ) ] #### 7. initiate "..._curl_commands.txt" @@ -343,6 +350,7 @@ download_aqs <- #' @references #' \insertRef{article_omernik2014ecoregions}{amadeus} #' @examples +#' \dontrun{ #' download_ecoregion( #' directory_to_save = tempdir(), #' acknowledgement = TRUE, @@ -350,6 +358,7 @@ download_aqs <- #' remove_command = TRUE, #' unzip = FALSE #' ) +#' } #' @export download_ecoregion <- function( epa_certificate_path = @@ -409,7 +418,7 @@ download_ecoregion <- function( ) #### 9. concatenate download_sink(commands_txt) - if (!file.exists(download_name)) { + if (check_destfile(download_name)) { #### 10. 
concatenate and print download commands to "..._wget_commands.txt" #### cat command only file does not already exist or #### if size does not match URL size @@ -471,6 +480,7 @@ download_ecoregion <- function( #' @references #' \insertRef{keller_description_2021}{amadeus} #' @examples +#' \dontrun{ #' download_geos( #' collection = "aqc_tavg_1hr_g1440x721_v1", #' date = "2024-01-01", @@ -479,6 +489,7 @@ download_ecoregion <- function( #' download = FALSE, # NOTE: download skipped for examples, #' remove_command = TRUE #' ) +#' } #' @export # nolint end # nolint start: cyclocomp @@ -589,7 +600,7 @@ download_geos <- function( download_folder_name, "\n" ) - if (!file.exists(download_folder_name)) { + if (check_destfile(download_folder_name)) { #### cat command only if file does not already exist cat(download_command) } @@ -647,6 +658,7 @@ download_geos <- function( #' @references #' \insertRef{danielson_global_2011}{amadeus} #' @examples +#' \dontrun{ #' download_gmted( #' statistic = "Breakline Emphasis", #' resolution = "7.5 arc-seconds", @@ -656,6 +668,7 @@ download_geos <- function( #' remove_command = TRUE, #' unzip = FALSE #' ) +#' } #' @export download_gmted <- function( statistic = c( @@ -739,7 +752,7 @@ download_gmted <- function( ) download_sink(commands_txt) #### 13. concatenate and print download command to "..._curl_commands.txt" - if (!file.exists(download_name)) { + if (check_destfile(download_name)) { #### cat command only if file does not already exist cat(download_command) } @@ -980,6 +993,7 @@ download_gmted <- function( #' #' \insertRef{data_gmao_merra-tavgU_3d_qdt_Np}{amadeus} #' @examples +#' \dontrun{ #' download_merra2( #' collection = "inst1_2d_int_Nx", #' date = "2024-01-01", @@ -988,6 +1002,7 @@ download_gmted <- function( #' download = FALSE, # NOTE: download skipped for examples, #' remove_command = TRUE, #' ) +#' } #' @export # nolint end # nolint start: cyclocomp @@ -1213,7 +1228,7 @@ download_merra2 <- function( download_name, "\n" ) - if (!file.exists(download_name)) { + if (check_destfile(download_name)) { #### cat command only if file does not already exist cat(download_command) } @@ -1246,7 +1261,7 @@ download_merra2 <- function( download_name_metadata, "\n" ) - if (!file.exists(download_name_metadata)) { + if (check_destfile(download_name_metadata)) { #### cat command only if file does not already exist cat(download_command_metadata) } @@ -1296,6 +1311,7 @@ download_merra2 <- function( #' @references #' \insertRef{mesinger_north_2006}{amadeus} #' @examples +#' \dontrun{ #' download_narr( #' variables = c("weasd", "omega"), #' year = 2023, @@ -1304,6 +1320,7 @@ download_merra2 <- function( #' download = FALSE, # NOTE: download skipped for examples, #' remove_command = TRUE #' ) +#' } #' @export # nolint end # nolint start: cyclocomp @@ -1385,9 +1402,9 @@ download_narr <- function( url, "\n" ) - if (!file.exists(destfile)) { + if (check_destfile(destfile)) { #### cat command if file does not already exist or if local file size - #### and the HTTP length (url file size) do not match + #### is 0 bytes cat(command) } } @@ -1446,6 +1463,7 @@ download_narr <- function( #' \insertRef{dewitz_national_2023}{amadeus}
#' \insertRef{dewitz_national_2024}{amadeus} #' @examples +#' \dontrun{ #' download_nlcd( #' collection = "Coterminous United States", #' year = 2021, @@ -1455,6 +1473,7 @@ download_narr <- function( #' remove_command = TRUE, #' unzip = FALSE #' ) +#' } #' @export download_nlcd <- function( collection = "Coterminous United States", @@ -1538,7 +1557,7 @@ download_nlcd <- function( ) download_sink(commands_txt) #### 12. concatenate and print download command to "..._curl_commands.txt" - if (!file.exists(download_name)) { + if (check_destfile(download_name)) { #### cat command only if file does not already exist cat(download_command) } @@ -1568,7 +1587,7 @@ download_nlcd <- function( #' Download roads data #' @description #' The \code{download_sedac_groads()} function accesses and downloads -#' roads data from [NASA's Global Roads Open Access Data Set (gROADS), v1 (1980-2010)](https://sedac.ciesin.columbia.edu/data/set/groads-global-roads-open-access-v1/data-download). +#' roads data from [NASA's Global Roads Open Access Data Set (gROADS), v1 (1980-2010)](https://earthdata.nasa.gov/data/catalog/sedac-ciesin-sedac-groads-v1-1.00). #' @param data_region character(1). Data can be downloaded for `"Global"`, #' `"Africa"`, `"Asia"`, `"Europe"`, `"Americas"`, `"Oceania East"`, and `"Oceania West"`. #' @param data_format character(1). Data can be downloaded as `"Shapefile"` or @@ -1601,6 +1620,7 @@ download_nlcd <- function( #' @references #' \insertRef{data_ciesin2013groads}{amadeus} #' @examples +#' \dontrun{ #' download_sedac_groads( #' data_region = "Americas", #' data_format = "Shapefile", @@ -1610,6 +1630,7 @@ download_nlcd <- function( #' remove_command = TRUE, #' unzip = FALSE #' ) +#' } #' @export download_sedac_groads <- function( data_region = c("Americas", "Global", "Africa", "Asia", "Europe", "Oceania East", "Oceania West"), @@ -1688,7 +1709,7 @@ download_sedac_groads <- function( "_curl_command.txt" ) download_sink(commands_txt) - if (!file.exists(download_name)) { + if (check_destfile(download_name)) { #### 12. concatenate and print download command to "..._curl_commands.txt" #### cat command if file does not already exist or is incomplete cat(download_command) @@ -1719,7 +1740,7 @@ download_sedac_groads <- function( #' Download population density data #' @description #' The \code{download_sedac_population()} function accesses and downloads -#' population density data from [NASA's UN WPP-Adjusted Population Density, v4.11](https://sedac.ciesin.columbia.edu/data/set/gpw-v4-population-density-adjusted-to-2015-unwpp-country-totals-rev11). +#' population density data from [NASA's UN WPP-Adjusted Population Density, v4.11](https://earthdata.nasa.gov/data/catalog/sedac-ciesin-sedac-gpwv4-apdens-wpp-2015-r11-4.11). #' @param data_resolution character(1). Available resolutions are 30 second #' (approx. 1 km), 2.5 minute (approx. 5 km), 15 minute (approx. 30 km), #' 30 minute (approx. 55 km), and 60 minute (approx. 110 km). 
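# A sketch of the check_destfile() guard used by the download functions above:
# a download command is written (or re-written) only when the destination file
# is missing or empty, so interrupted zero-byte downloads are retried.
f <- tempfile()
check_destfile(f)      # TRUE  -- file does not exist, command is kept
file.create(f)
check_destfile(f)      # TRUE  -- file exists but is 0 bytes, re-download
writeLines("data", f)
check_destfile(f)      # FALSE -- non-empty file, command is skipped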
@@ -1756,6 +1777,7 @@ download_sedac_groads <- function( #' @references #' \insertRef{data_ciesin2017gpwv4}{amadeus} #' @examples +#' \dontrun{ #' download_sedac_population( #' data_resolution = "30 second", #' data_format = "GeoTIFF", @@ -1766,6 +1788,7 @@ download_sedac_groads <- function( #' remove_command = TRUE, #' unzip = FALSE #' ) +#' } #' @export download_sedac_population <- function( data_resolution = "60 minute", @@ -1877,7 +1900,7 @@ download_sedac_population <- function( "_curl_commands.txt" ) download_sink(commands_txt) - if (!file.exists(download_name)) { + if (check_destfile(download_name)) { #### 13. concatenate and print download command to "..._curl_commands.txt" #### cat command if file does not already exist or is incomplete cat(download_command) @@ -1947,6 +1970,7 @@ download_sedac_population <- function( #' @references #' \insertRef{web_HMSabout}{amadeus} #' @examples +#' \dontrun{ #' download_hms( #' data_format = "Shapefile", #' date = "2024-01-01", @@ -1956,6 +1980,7 @@ download_sedac_population <- function( #' remove_command = TRUE, #' unzip = FALSE #' ) +#' } #' @export # nolint end # nolint start: cyclocomp @@ -2058,7 +2083,7 @@ download_hms <- function( url, "\n" ) - if (!file.exists(destfile)) { + if (check_destfile(destfile)) { #### cat command only if file does not already exist cat(command) } @@ -2137,6 +2162,7 @@ download_hms <- function( #' #' \insertRef{article_beck2018present}{amadeus} #' @examples +#' \dontrun{ #' download_koppen_geiger( #' data_resolution = "0.0083", #' time_period = "Present", @@ -2146,6 +2172,7 @@ download_hms <- function( #' remove_command = TRUE, #' unzip = FALSE #' ) +#' } # nolint end #' @export download_koppen_geiger <- function( @@ -2209,7 +2236,7 @@ download_koppen_geiger <- function( "_wget_command.txt" ) download_sink(commands_txt) - if (!file.exists(download_name)) { + if (check_destfile(download_name)) { #### 12. concatenate and print download command to "..._wget_commands.txt" #### cat command if file does not already exist or is incomplete cat(download_command) @@ -2426,6 +2453,13 @@ download_modis <- function( ladsurl <- "https://ladsweb.modaps.eosdis.nasa.gov/" version <- ifelse(startsWith(product, "VNP"), "5000", version) + #### 11. define date sequence + date_sequence <- generate_date_sequence( + date[1], + date[2], + sub_hyphen = FALSE + ) + #### 10. MOD06_L2 manual input if (product == "MOD06_L2") { mod06l2_url1 <- @@ -2447,40 +2481,48 @@ download_modis <- function( )) } + date_julian <- format(date_sequence, "%Y%j") + #### 10-1. Parse urls in csv file_url <- read.csv(mod06_links) file_url <- unlist(file_url[, 2]) download_url <- paste0( - substr(ladsurl, 1, nchar(ladsurl) - 1), + # substr(ladsurl, 1, nchar(ladsurl) - 1), file_url ) + download_url <- download_url[ + grep(paste0("A(", paste(date_julian, collapse = "|"), ")"), download_url) + ] + #### 10-2. 
Parse dates from csv - file_dates <- - regmatches( - file_url, - regexpr("[2][0-2][0-9]{2,2}[0-3][0-9]{2,2}", file_url) - ) - file_dates <- as.integer(file_dates) - date_start <- as.Date(as.character(min(file_dates)), format = "%Y%j") - date_end <- as.Date(as.character(max(file_dates)), format = "%Y%j") + # file_dates <- + # regmatches( + # file_url, + # regexpr("[2][0-2][0-9]{2,2}[0-3][0-9]{2,2}", file_url) + # ) + # file_dates <- as.integer(file_dates) + # date_start <- as.Date(as.character(min(file_dates)), format = "%Y%j") + # date_end <- as.Date(as.character(max(file_dates)), format = "%Y%j") - # Extract year and month from file_dates - splitter <- paste0( - substr(file_dates, 1, 4), "/", substr(file_dates, 5, 7), "/" - ) # Extract download names from file_url using splitter - download_name <- sapply(strsplit(file_url, splitter), `[`, 2) + download_name <- sapply(strsplit(download_url, "archives/"), `[`, 2) + + # Create directory structure with julian dates + dir_substr <- paste0( + substr(download_name, 11, 14), "/", + substr(download_name, 15, 17), "/" + ) #### 10-3. initiate "..._wget_commands.txt" file commands_txt <- paste0( directory_to_save, product, "_", - date_start, + date_julian[1], "_", - date_end, + date_julian[length(date_julian)], "_wget_commands.txt" ) @@ -2493,7 +2535,7 @@ download_modis <- function( nasa_earth_data_token, "\" -O ", directory_to_save, - splitter, + dir_substr, download_name, "\n" ) @@ -2501,12 +2543,13 @@ download_modis <- function( #### filter commands to non-existing files download_command <- download_command[ which( - !file.exists(paste0(directory_to_save, splitter, download_name)) + !file.exists(paste0(directory_to_save, dir_substr, download_name)) | + file.size(paste0(directory_to_save, dir_substr, download_name)) == 0 ) ] new_dirs <- unique( - sprintf("%s%s", directory_to_save, splitter) + sprintf("%s%s", directory_to_save, dir_substr) ) lapply( @@ -2530,13 +2573,6 @@ download_modis <- function( return(download_hash(hash, directory_to_save)) } - - #### 11. define date sequence - date_sequence <- generate_date_sequence( - date[1], - date[2], - sub_hyphen = FALSE - ) # In a certain year, list all available dates year <- as.character(substr(date[1], 1, 4)) filedir_year_url <- @@ -2646,9 +2682,8 @@ download_modis <- function( #### filter commands to non-existing files download_command <- download_command[ which( - !file.exists( - paste0(directory_to_save, dir_substr, download_name) - ) + !file.exists(paste0(directory_to_save, dir_substr, download_name)) | + file.size(paste0(directory_to_save, dir_substr, download_name)) == 0 ) ] @@ -2700,6 +2735,7 @@ download_modis <- function( #' @references #' \insertRef{web_usepa2024tri}{amadeus} #' @examples +#' \dontrun{ #' download_tri( #' year = 2021L, #' directory_to_save = tempdir(), @@ -2707,6 +2743,7 @@ download_modis <- function( #' download = FALSE, # NOTE: download skipped for examples, #' remove_command = TRUE #' ) +#' } #' @export download_tri <- function( year = c(2018L, 2022L), @@ -2747,7 +2784,7 @@ download_tri <- function( #### filter commands to non-existing files download_commands <- download_commands[ which( - !file.exists(download_names) + !file.exists(download_names) | file.size(download_names) == 0 ) ] #### 5. 
initiate "..._curl_commands.txt" @@ -2823,6 +2860,7 @@ download_tri <- function( #' @references #' \insertRef{web_usepa2024nei}{amadeus} #' @examples +#' \dontrun{ #' download_nei( #' year = c(2017L, 2020L), #' directory_to_save = tempdir(), @@ -2831,6 +2869,7 @@ download_tri <- function( #' remove_command = TRUE, #' unzip = FALSE #' ) +#' } #' @export download_nei <- function( epa_certificate_path = @@ -2886,7 +2925,7 @@ download_nei <- function( #### filter commands to non-existing files download_commands <- download_commands[ which( - !file.exists(download_names) + !file.exists(download_names) | file.size(download_names) == 0 ) ] #### 5. initiate "..._curl_commands.txt" @@ -2968,6 +3007,7 @@ download_nei <- function( #' @references #' \insertRef{data_usgs2023nhd}{amadeus} #' @examples +#' \dontrun{ #' download_huc( #' region = "Lower48", #' type = "Seamless", @@ -2977,6 +3017,7 @@ download_nei <- function( #' remove_command = TRUE, #' unzip = FALSE #' ) +#' } #' @export # @importFrom archive archive_extract download_huc <- @@ -3115,6 +3156,7 @@ download_huc <- #' * Yearly comma-separated value (CSV) files will be stored in #' \code{directory_to_save}. #' @examples +#' \dontrun{ #' download_cropscape( #' year = 2020, #' source = "USDA", @@ -3124,6 +3166,7 @@ download_huc <- #' remove_command = TRUE, #' unzip = FALSE #' ) +#' } #' @importFrom archive archive_extract #' @export download_cropscape <- function( @@ -3267,6 +3310,7 @@ download_cropscape <- function( #' @references #' \insertRef{article_daly2000prism}{amadeus} #' @examples +#' \dontrun{ #' download_prism( #' time = "202104", #' element = "ppt", @@ -3277,6 +3321,7 @@ download_cropscape <- function( #' download = FALSE, # NOTE: download skipped for examples, #' remove_command = TRUE #' ) +#' } #' @references #' * [PRISM Climate Group](https://prism.oregonstate.edu/) #' * [PRISM Web Service Guide](https://prism.oregonstate.edu/documents/PRISM_downloads_web_service.pdf) @@ -3401,6 +3446,7 @@ download_prism <- function( #' @references #' \insertRef{article_abatzoglou2013development}{amadeus} #' @examples +#' \dontrun{ #' download_gridmet( #' variables = "Precipitation", #' year = 2023, @@ -3409,6 +3455,7 @@ download_prism <- function( #' download = FALSE, # NOTE: download skipped for examples, #' remove_command = TRUE #' ) +#' } #' @export # nolint end download_gridmet <- function( @@ -3492,7 +3539,7 @@ download_gridmet <- function( url, "\n" ) - if (!file.exists(destfile)) { + if (check_destfile(destfile)) { #### cat command only if file does not already exist cat(command) } @@ -3540,6 +3587,7 @@ download_gridmet <- function( #' @references #' \insertRef{article_abatzoglou2018terraclimate}{amadeus} #' @examples +#' \dontrun{ #' download_terraclimate( #' variables = "Precipitation", #' year = 2023, @@ -3548,6 +3596,7 @@ download_gridmet <- function( #' download = FALSE, # NOTE: download skipped for examples, #' remove_command = TRUE #' ) +#' } #' @export # nolint end download_terraclimate <- function( @@ -3632,7 +3681,7 @@ download_terraclimate <- function( url, "\n" ) - if (!file.exists(destfile)) { + if (check_destfile(destfile)) { #### cat command only if file does not already exist cat(command) } diff --git a/R/download_auxiliary.R b/R/download_auxiliary.R index 7536e18a..937f2093 100644 --- a/R/download_auxiliary.R +++ b/R/download_auxiliary.R @@ -272,7 +272,7 @@ generate_date_sequence <- as.Date(date_end, format = "%Y-%m-%d"), "day" ) - if (sub_hyphen == TRUE) { + if (sub_hyphen) { dates_sub_hyphen <- gsub("-", "", 
as.character(dates_original)) return(dates_sub_hyphen) } else { @@ -528,12 +528,15 @@ narr_variable <- function(variable) { return(list(base, months)) } -#' Create has of downloaded files. +#' Create hash of downloaded files. #' @description -#' Create \code{rlang::hash_file} of the downloaded files. +#' Create a combined SHA-1 hash based on the contents and sizes of files +#' in a specified directory. System-specific metadata (e.g. full file paths, +#' access times, or user information) are not tracked, ensuring the hash +#' remains consistent across different systems, users, and access times. #' @param hash logical(1). Create hash of downloaded files. #' @param dir character(1). Directory path. -#' @return character(1) \code{rlang::hash} of downloaded files. +#' @return character(1) Combined SHA-1 hash of the files' contents and sizes. #' @keywords internal auxiliary #' @importFrom rlang hash_file #' @export @@ -542,7 +545,34 @@ download_hash <- function( dir = NULL ) { if (hash) { - h <- rlang::hash_file(dir) - return(h) + h_command <- paste0( + "(find ", + shQuote(dir), + " -type f -print0 | sort -z | ", + "xargs -0 sha1sum -- | awk '{print $1}'; ", + "find ", + shQuote(dir), + " -type f -print0 | sort -z | ", + "xargs -0 stat -c '%s') | sha1sum" + ) + h <- system(h_command, intern = TRUE) + h_clean <- sub(" -$", "", h) + return(h_clean) + } +} + +#' Check if destination file exists or is 0 bytes. +#' @description +#' Check if destination file exists or is 0 bytes. If either condition is +#' met, the function returns `TRUE` to allow the download to proceed. +#' @param destfile character(1). Destination file path. +#' @return logical(1) +#' @keywords internal auxiliary +#' @export +check_destfile <- function(destfile) { + if (!file.exists(destfile) || file.size(destfile) == 0) { + return(TRUE) + } else { + return(FALSE) } } diff --git a/R/process.R b/R/process.R index 4cad21b5..b7bae4bb 100644 --- a/R/process.R +++ b/R/process.R @@ -41,7 +41,7 @@ #' \dontrun{ #' process_covariates( #' covariate = "narr", -#' date = c("2018-01-01", "2018-01-01"), +#' date = c("2018-01-01", "2018-01-10"), #' variable = "weasd", #' path = system.file("extdata", "examples", "narr", "weasd") #' ) @@ -152,7 +152,7 @@ process_covariates <- #' @author Insang Song #' @return A character object that conforms to the regular #' expression. Details of regular expression in R can be found in [regexp]. -#' @seealso [calc_modis_par] +#' @seealso [calculate_modis_par] #' @examples #' process_modis_sds(product = "MOD09GA") #' @export @@ -908,10 +908,10 @@ process_ecoregion <- #' @author Insang Song, Mariana Kassien #' @return a `SpatVector` object (points) in `year` #' `year` is stored in a field named `"year"`. -#' @note Visit [TRI Data and Tools](https://www.epa.gov/toxics-release-inventory-tri-program/tri-data-and-tools) +#' @note Visit [TRI Data and Tools](https://www.epa.gov/toxics-release-inventory-tri-program/tri-toolbox) #' to view the available years and variables. #' @references -#' https://www.epa.gov/toxics-release-inventory-tri-program/tri-data-and-tools +#' https://www.epa.gov/toxics-release-inventory-tri-program/tri-toolbox #' @importFrom terra vect #' @importFrom terra crs #' @importFrom terra nearby @@ -1135,7 +1135,7 @@ process_nei <- function( #' the internal procedure of this function keeps "Included" if there #' are multiple event types per site-time. #' @param path character(1). Directory path to daily measurement data. -#' @param date character(2). Start and end date. 
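The `check_destfile()` helper defined above replaces the bare `!file.exists()` checks in the download functions, so a download command is regenerated whenever the destination file is missing or zero bytes. A minimal base-R sketch of that gating logic, using a hypothetical temporary file in place of a real download target:

```r
# Regenerate a download command only when the destination file is
# absent or empty, mirroring the check_destfile() logic above.
destfile <- file.path(tempdir(), "example_download.nc") # hypothetical target

!file.exists(destfile) || file.size(destfile) == 0 # no file yet
#> [1] TRUE

file.create(destfile) # an empty placeholder still triggers a re-download
!file.exists(destfile) || file.size(destfile) == 0
#> [1] TRUE

writeLines("not empty", destfile) # a non-empty file is skipped
!file.exists(destfile) || file.size(destfile) == 0
#> [1] FALSE
```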
+#' @param date character(1 or 2). Date (1) or start and end dates (2). #' Should be in `"YYYY-MM-DD"` format and sorted. #' @param mode character(1). One of #' * "date-location" (all dates * all locations) @@ -1191,7 +1191,11 @@ process_aqs <- stop("date has invalid format(s). Please check the values.") } if (length(date) != 2) { - stop("date should be a character vector of length 2.") + if (length(date) == 1) { + date <- c(date, date) + } else { + stop("date should be a character vector of length 1 or 2.") + } } } else { stop("date should be defined.") @@ -1420,7 +1424,7 @@ process_sedac_population <- function( #' @param ... Placeholders. #' @note U.S. context. The returned `SpatVector` object contains a #' `$description` column to represent the temporal range covered by the -#' dataset. For more information, see . +#' dataset. For more information, see . #' @author Insang Song #' @return a `SpatVector` object #' @importFrom terra vect @@ -1456,8 +1460,7 @@ process_sedac_groads <- function( #' @description #' The \code{process_hms()} function imports and cleans raw wildfire smoke #' plume coverage data, returning a single `SpatVector` object. -#' @param date character(2). length of 10 each. -#' Start/end date of downloaded data. +#' @param date character(1 or 2). Date (1) or start and end dates (2). #' Format YYYY-MM-DD (ex. September 1, 2023 = "2023-09-01"). #' @param path character(1). Directory with downloaded NOAA HMS data files. #' @param extent numeric(4) or SpatExtent giving the extent of the output @@ -1484,7 +1487,7 @@ process_sedac_groads <- function( #' @importFrom stats na.omit #' @export process_hms <- function( - date = c("2018-01-01", "2018-01-01"), + date = "2018-01-01", path = NULL, extent = NULL, ...) { @@ -1493,6 +1496,7 @@ process_hms <- function( #### check for variable check_for_null_parameters(mget(ls())) #### check dates + if (length(date) == 1) date <- c(date, date) stopifnot(length(date) == 2) date <- date[order(as.Date(date))] #### identify file paths @@ -1789,8 +1793,7 @@ process_gmted <- function( #' @description #' The \code{process_narr()} function imports and cleans raw meteorological #' data, returning a single `SpatRaster` object. -#' @param date character(2). length of 10 each. -#' Start/end date of downloaded data. +#' @param date character(1 or 2). Date (1) or start and end dates (2). #' Format YYYY-MM-DD (ex. September 1, 2023 = "2023-09-01"). #' @param variable character(1). Variable name acronym. See [List of Variables in NARR Files](https://ftp.cpc.ncep.noaa.gov/NARR/fixed/merged_land_AWIP32corrected.pdf) #' for variable names and acronym codes. @@ -1812,7 +1815,7 @@ process_gmted <- function( #' ## amount of data which is not included in the package. 
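The rewritten `download_hash()` above builds a directory-level fingerprint by piping `find`, `sha1sum`, and GNU `stat` output through the shell, so it assumes those utilities are available. As a rough, plain-R illustration of the same idea (not the implementation used by the package), per-file content hashes and sizes can be combined and hashed once more:

```r
# Illustrative only: a hypothetical helper that hashes file contents and
# sizes, ignoring absolute paths and timestamps.
library(rlang)

hash_directory <- function(dir) {
  files <- sort(list.files(dir, recursive = TRUE, full.names = TRUE))
  rlang::hash(list(rlang::hash_file(files), file.size(files)))
}

hash_directory(tempdir()) # stand-in for directory_to_save
```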
#' \dontrun{ #' process_narr( -#' date = c("2018-01-01", "2018-01-01"), +#' date = c("2018-01-01", "2018-01-10"), #' variable = "weasd", #' path = "./tests/testdata/narr/weasd" #' ) @@ -1820,7 +1823,7 @@ process_gmted <- function( #' @export # nolint end process_narr <- function( - date = c("2023-09-01", "2023-09-01"), + date = "2023-09-01", variable = NULL, path = NULL, extent = NULL, @@ -1830,6 +1833,7 @@ process_narr <- function( #### check for variable check_for_null_parameters(mget(ls())) #### check dates + if (length(date) == 1) date <- c(date, date) stopifnot(length(date) == 2) date <- date[order(as.Date(date))] #### identify file paths @@ -2041,7 +2045,8 @@ process_narr <- function( #' @description #' The \code{process_geos()} function imports and cleans raw atmospheric #' composition data, returning a single `SpatRaster` object. -#' @param date character(2). length of 10. Format "YYYY-MM-DD". +#' @param date character(1 or 2). Date (1) or start and end dates (2). +#' Format YYYY-MM-DD (ex. September 1, 2023 = "2023-09-01"). #' @param variable character(1). GEOS-CF variable name(s). #' @param path character(1). Directory with downloaded netCDF (.nc4) files. #' @param extent numeric(4) or SpatExtent giving the extent of the raster @@ -2069,7 +2074,7 @@ process_narr <- function( #' } #' @export process_geos <- - function(date = c("2018-01-01", "2018-01-01"), + function(date = c("2018-01-01", "2018-01-10"), variable = NULL, path = NULL, extent = NULL, @@ -2079,6 +2084,7 @@ process_geos <- #### check for variable check_for_null_parameters(mget(ls())) #### check dates + if (length(date) == 1) date <- c(date, date) stopifnot(length(date) == 2) date <- date[order(as.Date(date))] #### identify file paths @@ -2233,7 +2239,8 @@ process_geos <- #' @description #' The \code{process_merra2()} function imports and cleans raw atmospheric #' composition data, returning a single `SpatRaster` object. -#' @param date character(2). length of 10. Format "YYYY-MM-DD". +#' @param date character(1 or 2). Date (1) or start and end dates (2). +#' Format YYYY-MM-DD (ex. September 1, 2023 = "2023-09-01"). #' @param variable character(1). MERRA2 variable name(s). #' @param path character(1). Directory with downloaded netCDF (.nc4) files. #' @param extent numeric(4) or SpatExtent giving the extent of the raster @@ -2263,7 +2270,7 @@ process_geos <- #' } #' @export process_merra2 <- - function(date = c("2018-01-01", "2018-01-01"), + function(date = c("2018-01-01", "2018-01-10"), variable = NULL, path = NULL, extent = NULL, @@ -2273,6 +2280,7 @@ process_merra2 <- #### check for variable check_for_null_parameters(mget(ls())) #### check dates + if (length(date) == 1) date <- c(date, date) stopifnot(length(date) == 2) date <- date[order(as.Date(date))] #### identify file paths @@ -2423,8 +2431,7 @@ process_merra2 <- #' @description #' The \code{process_gridmet()} function imports and cleans raw gridded surface meteorological #' data, returning a single `SpatRaster` object. -#' @param date character(2). length of 10 each. -#' Start/end date of downloaded data. +#' @param date character(1 or 2). Date (1) or start and end dates (2). #' Format YYYY-MM-DD (ex. September 1, 2023 = "2023-09-01"). #' @param variable character(1). Variable name or acronym code. See [gridMET Generate Wget File](https://www.climatologylab.org/wget-gridmet.html) #' for variable names and acronym codes. 
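Several `process_*()` functions above now accept either a single date or a start/end pair, normalizing a length-1 input with `if (length(date) == 1) date <- c(date, date)` before sorting. A standalone sketch of that normalization (the helper name is hypothetical, not part of the package):

```r
# Expand a single date to a start/end pair and sort, as the process_*()
# functions above do before locating files.
normalize_date <- function(date) {
  if (length(date) == 1) date <- c(date, date)
  stopifnot(length(date) == 2)
  date[order(as.Date(date))]
}

normalize_date("2023-09-01")
#> [1] "2023-09-01" "2023-09-01"
normalize_date(c("2023-09-10", "2023-09-01"))
#> [1] "2023-09-01" "2023-09-10"
```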
(Note: variable "Burning Index" has code "bi" and variable @@ -2454,7 +2461,7 @@ process_merra2 <- #' @export # nolint end process_gridmet <- function( - date = c("2023-09-01", "2023-09-01"), + date = c("2023-09-01", "2023-09-10"), variable = NULL, path = NULL, extent = NULL, @@ -2462,6 +2469,7 @@ process_gridmet <- function( #### directory setup path <- download_sanitize_path(path) #### check dates + if (length(date) == 1) date <- c(date, date) stopifnot(length(date) == 2) date <- date[order(as.Date(date))] #### check for variable @@ -2590,8 +2598,7 @@ process_gridmet <- function( #' @description #' The \code{process_terraclimate()} function imports and cleans climate and water balance #' data, returning a single `SpatRaster` object. -#' @param date character(2). length of 10 each. -#' Start/end date of downloaded data. +#' @param date character(1 or 2). Date (1) or start and end dates (2). #' Format YYYY-MM-DD (ex. September 1, 2023 = "2023-09-01"). #' @param variable character(1). Variable name or acronym code. See [TerraClimate Direct Downloads](https://climate.northwestknowledge.net/TERRACLIMATE/index_directDownloads.php) #' for variable names and acronym codes. @@ -2623,7 +2630,7 @@ process_gridmet <- function( #' @export # nolint end process_terraclimate <- function( - date = c("2023-09-01", "2023-09-01"), + date = c("2023-09-01", "2023-09-10"), variable = NULL, path = NULL, extent = NULL, @@ -2633,6 +2640,7 @@ process_terraclimate <- function( #### check for variable check_for_null_parameters(mget(ls())) #### check dates + if (length(date) == 1) date <- c(date, date) stopifnot(length(date) == 2) date <- date[order(as.Date(date))] variable_checked <- process_variable_codes( diff --git a/README.md b/README.md index 0ba5b2a6..157f93c3 100644 --- a/README.md +++ b/README.md @@ -21,16 +21,6 @@ install.packages("amadeus") pak::pak("NIEHS/amadeus") ``` -## Contribution - -To add or edit functionality for new data sources or datasets, open a [Pull request](https://github.com/NIEHS/amadeus/pulls) into the main branch with a detailed description of the proposed changes. Pull requests must pass all status checks, and then will be approved or rejected by `amadeus`'s authors. - -Utilize [Issues](https://github.com/NIEHS/amadeus/issues) to notify the authors of bugs, questions, or recommendations. Identify each issue with the appropriate label to help ensure a timely response. - -
- -
- ## Download `download_data` accesses and downloads raw geospatial data from a variety of open source data repositories. The function is a wrapper that calls source-specific download functions, each of which accounts for the source's unique combination of URL, file naming conventions, and data types. Download functions cover the following sources: | [MRLC[^1] Consortium National Land Cover Database (NLCD)](https://www.mrlc.gov/data) | GeoTIFF | Land Use | | [NASA[^2] Moderate Resolution Imaging Spectroradiometer (MODIS)](https://modis.gsfc.nasa.gov/data/) | HDF | Atmosphere
Meteorology
Land Use
Satellite | | [NASA Modern-Era Retrospective analysis for Research and Applications, Version 2 (MERRA-2)](https://www.nature.com/articles/sdata2018214) | netCDF | Atmosphere
Meteorology | -| [NASA SEDAC[^3] UN WPP-Adjusted Population Density](https://sedac.ciesin.columbia.edu/data/set/gpw-v4-population-density-adjusted-to-2015-unwpp-country-totals-rev11) | GeoTIFF
netCDF | Population | -| [NASA SEDAC Global Roads Open Access Data Set](https://sedac.ciesin.columbia.edu/data/set/groads-global-roads-open-access-v1/data-download) | Shapefile
Geodatabase | Roadways | +| [NASA SEDAC[^3] UN WPP-Adjusted Population Density](https://earthdata.nasa.gov/data/catalog/sedac-ciesin-sedac-gpwv4-apdens-wpp-2015-r11-4.11) | GeoTIFF
netCDF | Population | +| [NASA SEDAC Global Roads Open Access Data Set](https://earthdata.nasa.gov/data/catalog/sedac-ciesin-sedac-groads-v1-1.00) | Shapefile
Geodatabase | Roadways | | [NASA Goddard Earth Observing System Composition Forecasting (GEOS-CF)](https://gmao.gsfc.nasa.gov/GEOS_systems/) | netCDF | Atmosphere
Meteorology | | [NOAA Hazard Mapping System Fire and Smoke Product](https://www.ospo.noaa.gov/products/land/hms.html#about) | Shapefile
KML | Wildfire Smoke | | [NOAA NCEP[^4] North American Regional Reanalysis (NARR)](https://psl.noaa.gov/data/gridded/data.narr.html) | netCDF | Atmosphere
Meteorology | @@ -65,19 +55,21 @@ Example use of `download_data` using NOAA NCEP North American Regional Reanalysi directory <- "/ EXAMPLE / FILE / PATH /" download_data( dataset_name = "narr", - year = c(2022, 2022), + year = 2022, variable = "weasd", directory_to_save = directory, acknowledgement = TRUE, - download = TRUE + download = TRUE, + hash = TRUE ) ``` ``` Downloading requested files... Requested files have been downloaded. +[1] "5655d4281b76f4d4d5bee234c2938f720cfec879" ``` ```r -list.files(paste0(directory, "weasd")) +list.files(file.path(directory, "weasd")) ``` ``` [1] "weasd.2022.nc" @@ -92,21 +84,21 @@ To avoid errors when using `process_covariates`, **do not edit the raw downloade Example use of `process_covariates` using the downloaded "weasd" data. ```r -weasd <- process_covariates( +weasd_process <- process_covariates( covariate = "narr", date = c("2022-01-01", "2022-01-05"), variable = "weasd", - path = paste0(directory, "weasd"), + path = file.path(directory, "weasd"), extent = NULL ) ``` ``` -Cleaning weasd data for January, 2022... Detected monolevel data... +Cleaning weasd data for 2022... Returning daily weasd data from 2022-01-01 to 2022-01-05. ``` ```r -weasd +weasd_process ``` ``` class : SpatRaster @@ -123,19 +115,19 @@ time : 2022-01-01 to 2022-01-05 UTC ## Calculate Covariates -`calc_covariates` stems from the [`beethoven`](https://github.com/NIEHS/beethoven) project's need for various types of data extracted at precise locations. `calc_covariates`, therefore, extracts data from the "cleaned" `SpatRaster` or `SpatVector` object at user defined locations. Users can choose to buffer the locations. The function returns a `data.frame` or `SpatVector` with data extracted at all locations for each layer or row in the `SpatRaster` or `SpatVector` object, respectively. +`calculate_covariates` stems from the [`beethoven`](https://github.com/NIEHS/beethoven) project's need for various types of data extracted at precise locations. `calculate_covariates`, therefore, extracts data from the "cleaned" `SpatRaster` or `SpatVector` object at user defined locations. Users can choose to buffer the locations. The function returns a `data.frame`, `sf`, or `SpatVector` with data extracted at all locations for each layer or row in the `SpatRaster` or `SpatVector` object, respectively. -Example of `calc_covariates` using processed "weasd" data. +Example of `calculate_covariates` using processed "weasd" data. ```r locs <- data.frame(id = "001", lon = -78.8277, lat = 35.95013) -weasd_covar <- calc_covariates( +weasd_covar <- calculate_covariates( covariate = "narr", from = weasd_process, locs = locs, locs_id = "id", radius = 0, - geom = FALSE + geom = "sf" ) ``` ``` @@ -151,14 +143,23 @@ Returning extracted covariates. 
weasd_covar ``` ``` - id time weasd_0 -1 0001 2022-01-01 0.000000000 -2 0001 2022-01-02 0.000000000 -3 0001 2022-01-03 0.000000000 -4 0001 2022-01-04 0.000000000 -5 0001 2022-01-05 0.001953125 +Simple feature collection with 5 features and 3 fields +Geometry type: POINT +Dimension: XY +Bounding box: xmin: 8184606 ymin: 3523283 xmax: 8184606 ymax: 3523283 +Projected CRS: unnamed + id time weasd_0 geometry +1 001 2022-01-01 0.000000000 POINT (8184606 3523283) +2 001 2022-01-02 0.000000000 POINT (8184606 3523283) +3 001 2022-01-03 0.000000000 POINT (8184606 3523283) +4 001 2022-01-04 0.000000000 POINT (8184606 3523283) +5 001 2022-01-05 0.001953125 POINT (8184606 3523283) ``` +## Climate and Health Outcomes Research Data Systems + +The `amadeus` package has been developed as part of the National Institute of Environmental Health Science's (NIEHS) [Climate and Health Outcomes Research Data Systems (CHORDS)](https://www.niehs.nih.gov/research/programs/chords) program. CHORDS aims to "build and strengthen data infrastructure for patient-centered outcomes research on climate change and health" by providing curated data, analysis tools, and educational resources. Visit the CHORDS catalog at [https://niehs.github.io/chords_landing/index.html](https://niehs.github.io/chords_landing/index.html). + ## Additional Resources The following R packages can also be used to access climate and weather data in R, but each differs from `amadeus` in the data sources covered or type of functionality provided. @@ -172,6 +173,12 @@ The following R packages can also be used to access climate and weather data in | [`rNOMADS`](https://cran.r-project.org/package=rNOMADS) | [NOAA Operational Model Archive and Distribution System](https://nomads.ncep.noaa.gov/) | | [`sen2r`[^8]](https://github.com/ranghetti/sen2r) | [Sentinel-2](https://sentiwiki.copernicus.eu/web/s2-mission) | +## Contribution + +To add or edit functionality for new data sources or datasets, open a [Pull request](https://github.com/NIEHS/amadeus/pulls) into the main branch with a detailed description of the proposed changes. Pull requests must pass all status checks, and then will be approved or rejected by `amadeus`'s authors. + +Utilize [Issues](https://github.com/NIEHS/amadeus/issues) to notify the authors of bugs, questions, or recommendations. Identify each issue with the appropriate label to help ensure a timely response. 
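Since `geom = "sf"` in the README example above returns covariates as an `sf` object in the projection of the NARR grid, a short follow-on sketch (assuming the `weasd_covar` object from that example is available) shows one way to recover longitude/latitude coordinates or a plain `data.frame`:

```r
# Assuming `weasd_covar` from the example above (an sf object in the
# NARR projection): reproject to WGS84 or drop the geometry column.
library(sf)

weasd_wgs84 <- sf::st_transform(weasd_covar, "EPSG:4326")
sf::st_coordinates(weasd_wgs84)[1, ]

weasd_df <- sf::st_drop_geometry(weasd_covar)
head(weasd_df)
```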
+ [^1]: Multi-Resolution Land Characteristics [^2]: National Aeronautics and Space Administration [^3]: Socioeconomic Data and Applications Center diff --git a/_pkgdown.yml b/_pkgdown.yml index ae2ff330..bffac97a 100644 --- a/_pkgdown.yml +++ b/_pkgdown.yml @@ -22,8 +22,9 @@ reference: - subtitle: Covariates desc: Functions which extract data values at user-defined points - contents: - - calc_covariates - - starts_with("calc_") + - calculate_covariates + - starts_with("calculate_") + - sum_edc - subtitle: Spatio-Temporal desc: Functions which convert to, from, and between spatio-temporal data types - contents: diff --git a/inst/REFERENCES.bib b/inst/REFERENCES.bib index 9ac3b9fd..ce4dd3fb 100644 --- a/inst/REFERENCES.bib +++ b/inst/REFERENCES.bib @@ -800,7 +800,7 @@ @misc{web_usepa2024nei @misc{data_ciesin2013groads, title = {Global {Roads} {Open} {Access} {Data} {Set}, {Version} 1 ({gROADSv1})}, copyright = {Users may use and redistribute these data without explicit written permission from CIESIN or Information Technology Outreach Services (ITOS)/University of Georgia, with the exception of roads data associated with countries that are listed in the data documentation as requiring additional credits or holding special restrictions. Users are advised to consult the data documentation for further information and to obtain necessary permissions or adhere to relevant restrictions that apply to each of those data sets.}, - url = {https://sedac.ciesin.columbia.edu/data/set/groads-global-roads-open-access-v1}, + url = {https://earthdata.nasa.gov/data/catalog/sedac-ciesin-sedac-groads-v1-1.00}, doi = {10.7927/H4VD6WCT}, abstract = {The Global Roads Open Access Data Set, Version 1 (gROADSv1) was developed under the auspices of the CODATA Global Roads Data Development Task Group. The data set combines the best available roads data by country into a global roads coverage, using the UN Spatial Data Infrastructure Transport (UNSDI-T) version 2 as a common data model. All country road networks have been joined topologically at the borders, and many countries have been edited for internal topology. Source data for each country are provided in the documentation, and users are encouraged to refer to the readme file for use constraints that apply to a small number of countries. Because the data are compiled from multiple sources, the date range for road network representations ranges from the 1980s to 2010 depending on the country (most countries have no confirmed date), and spatial accuracy varies. The baseline global data set was compiled by the Information Technology Outreach Services (ITOS) of the University of Georgia. Updated data for 27 countries and 6 smaller geographic entities were assembled by Columbia University's Center for International Earth Science Information Network (CIESIN), with a focus largely on developing countries with the poorest data coverage.}, urldate = {2024-06-24}, @@ -929,7 +929,7 @@ @misc{data_ciesin2017gpwv4 title = {Gridded {Population} of the {World}, {Version} 4 ({GPWv4}): {Population} {Density}, {Revision} 11}, copyright = {This work is licensed under the Creative Commons Attribution 4.0 International License (https://creativecommons.org/licenses/by/4.0). 
Users are free to use, copy, distribute, transmit, and adapt the work for commercial and non-commercial purposes, without restriction, as long as clear attribution of the source is provided.}, shorttitle = {Gridded {Population} of the {World}, {Version} 4 ({GPWv4})}, - url = {https://sedac.ciesin.columbia.edu/data/set/gpw-v4-population-density-rev11}, + url = {https://earthdata.nasa.gov/data/catalog/sedac-ciesin-sedac-gpwv4-popdens-r11-4.11}, doi = {10.7927/H49C6VHW}, abstract = {The Gridded Population of the World, Version 4 (GPWv4): Population Density, Revision 11 consists of estimates of human population density (number of persons per square kilometer) based on counts consistent with national censuses and population registers, for the years 2000, 2005, 2010, 2015, and 2020. A proportional allocation gridding algorithm, utilizing approximately 13.5 million national and sub-national administrative units, was used to assign population counts to 30 arc-second grid cells. The population density rasters were created by dividing the population count raster for a given target year by the land area raster. The data files were produced as global rasters at 30 arc-second ({\textasciitilde}1 km at the equator) resolution. To enable faster global processing, and in support of research communities, the 30 arc-second count data were aggregated to 2.5 arc-minute, 15 arc-minute, 30 arc-minute and 1 degree resolutions to produce density rasters at these resolutions.}, urldate = {2024-06-24}, diff --git a/man/calc_prepare_locs.Rd b/man/calc_prepare_locs.Rd index bdbadfca..96879a6d 100644 --- a/man/calc_prepare_locs.Rd +++ b/man/calc_prepare_locs.Rd @@ -21,7 +21,8 @@ Passed from \code{calc_\*()}.} (Default = 0). Passed from \code{calc_\*()}.} \item{geom}{logical(1). Should the geometry of \code{locs} be returned in the -\code{data.frame}? Default is \code{FALSE}.} +\code{data.frame}? Default is \code{FALSE}, options "sf" or "terra" will preserve +geometry, but will use \code{terra} for extraction.} } \value{ A \code{list} containing \code{SpatVector} and \code{data.frame} objects diff --git a/man/calc_return_locs.Rd b/man/calc_return_locs.Rd index d76d9c46..97a52611 100644 --- a/man/calc_return_locs.Rd +++ b/man/calc_return_locs.Rd @@ -13,8 +13,8 @@ calc_return_locs(covar, POSIXt = TRUE, geom, crs) \code{POSIXt}? If \code{FALSE}, the time values will be checked for integer class (year and year-month).} -\item{geom}{logical(1). Should \code{covar} be returned as a -\code{data.frame}? Default is \code{FALSE}.} +\item{geom}{FALSE/"sf"/"terra". Should \code{covar} be returned as a +\code{data.frame}? Default is \code{FALSE}, options with geometry are "sf" or "terra".} \item{crs}{terra::crs(1). 
Coordinate reference system (inherited from \code{from}).} diff --git a/man/calc_covariates.Rd b/man/calculate_covariates.Rd similarity index 62% rename from man/calc_covariates.Rd rename to man/calculate_covariates.Rd index 8627c940..8fbf5f5e 100644 --- a/man/calc_covariates.Rd +++ b/man/calculate_covariates.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_covariates} -\alias{calc_covariates} +\name{calculate_covariates} +\alias{calculate_covariates} \title{Calculate covariates wrapper function} \usage{ -calc_covariates( +calculate_covariates( covariate = c("modis", "koppen-geiger", "koeppen-geiger", "koppen", "koeppen", "geos", "dummies", "gmted", "sedac_groads", "groads", "roads", "ecoregions", "ecoregion", "hms", "smoke", "gmted", "narr", "geos", "sedac_population", "population", "nlcd", @@ -49,7 +49,7 @@ SpatRaster or SpatVector objects before passing to ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_covariates( +calculate_covariates( covariate = "narr", from = narr, # derived from process_covariates() example locs = loc, @@ -60,22 +60,22 @@ calc_covariates( } \seealso{ \itemize{ -\item \code{\link{calc_modis_par}}: "modis", "MODIS" -\item \code{\link{calc_koppen_geiger}}: "koppen-geiger", "koeppen-geiger", "koppen" -\item \code{\link{calc_ecoregion}}: "ecoregion", "ecoregions" -\item \code{\link{calc_temporal_dummies}}: "dummies", "Dummies" -\item \code{\link{calc_hms}}: "hms", "smoke", "HMS" -\item \code{\link{calc_gmted}}: "gmted", "GMTED" -\item \code{\link{calc_narr}}: "narr", "NARR" -\item \code{\link{calc_geos}}: "geos", "geos_cf", "GEOS" -\item \code{\link{calc_sedac_population}}: "population", "sedac_population" -\item \code{\link{calc_sedac_groads}}: "roads", "groads", "sedac_groads" -\item \code{\link{calc_nlcd}}: "nlcd", "NLCD" -\item \code{\link{calc_tri}}: "tri", "TRI" -\item \code{\link{calc_nei}}: "nei", "NEI" -\item \code{\link{calc_merra2}}: "merra", "MERRA", "merra2", "MERRA2" -\item \code{\link{calc_gridmet}}: "gridMET", "gridmet" -\item \code{\link{calc_terraclimate}}: "terraclimate", "TerraClimate" +\item \code{\link{calculate_modis_par}}: "modis", "MODIS" +\item \code{\link{calculate_koppen_geiger}}: "koppen-geiger", "koeppen-geiger", "koppen" +\item \code{\link{calculate_ecoregion}}: "ecoregion", "ecoregions" +\item \code{\link{calculate_temporal_dummies}}: "dummies", "Dummies" +\item \code{\link{calculate_hms}}: "hms", "smoke", "HMS" +\item \code{\link{calculate_gmted}}: "gmted", "GMTED" +\item \code{\link{calculate_narr}}: "narr", "NARR" +\item \code{\link{calculate_geos}}: "geos", "geos_cf", "GEOS" +\item \code{\link{calculate_sedac_population}}: "population", "sedac_population" +\item \code{\link{calculate_sedac_groads}}: "roads", "groads", "sedac_groads" +\item \code{\link{calculate_nlcd}}: "nlcd", "NLCD" +\item \code{\link{calculate_tri}}: "tri", "TRI" +\item \code{\link{calculate_nei}}: "nei", "NEI" +\item \code{\link{calculate_merra2}}: "merra", "MERRA", "merra2", "MERRA2" +\item \code{\link{calculate_gridmet}}: "gridMET", "gridmet" +\item \code{\link{calculate_terraclimate}}: "terraclimate", "TerraClimate" } } \author{ diff --git a/man/calc_ecoregion.Rd b/man/calculate_ecoregion.Rd similarity index 77% rename from man/calc_ecoregion.Rd rename to man/calculate_ecoregion.Rd index a8ca1851..41bafc1d 100644 --- a/man/calc_ecoregion.Rd +++ b/man/calculate_ecoregion.Rd @@ -1,10 +1,10 @@ % 
Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_ecoregion} -\alias{calc_ecoregion} +\name{calculate_ecoregion} +\alias{calculate_ecoregion} \title{Calculate ecoregions covariates} \usage{ -calc_ecoregion(from = NULL, locs, locs_id = "site_id", geom = FALSE, ...) +calculate_ecoregion(from = NULL, locs, locs_id = "site_id", geom = FALSE, ...) } \arguments{ \item{from}{SpatVector(1). Output of \code{\link{process_ecoregion}}.} @@ -14,9 +14,9 @@ a unique identifier field named \code{locs_id}} \item{locs_id}{character(1). Name of unique identifier.} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Placeholders.} } @@ -39,7 +39,7 @@ each ecoregion. ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_ecoregion( +calculate_ecoregion( from = ecoregion, # derived from process_ecoregion() example locs = loc, locs_id = "id", diff --git a/man/calc_geos.Rd b/man/calculate_geos.Rd similarity index 83% rename from man/calc_geos.Rd rename to man/calculate_geos.Rd index 70172ad4..9a009329 100644 --- a/man/calc_geos.Rd +++ b/man/calculate_geos.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_geos} -\alias{calc_geos} +\name{calculate_geos} +\alias{calculate_geos} \title{Calculate atmospheric composition covariates} \usage{ -calc_geos( +calculate_geos( from, locs, locs_id = NULL, @@ -28,9 +28,9 @@ containing identifier for each unique coordinate location.} \item{fun}{character(1). Function used to summarize multiple raster cells within sites location buffer (Default = \code{mean}).} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Placeholders.} } @@ -49,7 +49,7 @@ radius. ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_geos( +calculate_geos( from = geos, # derived from process_geos() example locs = loc, locs_id = "id", diff --git a/man/calc_gmted.Rd b/man/calculate_gmted.Rd similarity index 83% rename from man/calc_gmted.Rd rename to man/calculate_gmted.Rd index f1aa1990..24da9237 100644 --- a/man/calc_gmted.Rd +++ b/man/calculate_gmted.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_gmted} -\alias{calc_gmted} +\name{calculate_gmted} +\alias{calculate_gmted} \title{Calculate elevation covariates} \usage{ -calc_gmted( +calculate_gmted( from, locs, locs_id = NULL, @@ -28,9 +28,9 @@ containing identifier for each unique coordinate location.} \item{fun}{character(1). 
Function used to summarize multiple raster cells within sites location buffer (Default = \code{mean}).} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Placeholders} } @@ -49,7 +49,7 @@ at 7.5 arc-second resolution with 0 meter buffer: breakline_emphasis_r75_0). ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_gmted( +calculate_gmted( from = gmted, # derived from process_gmted() example locs = loc, locs_id = "id", diff --git a/man/calc_gridmet.Rd b/man/calculate_gridmet.Rd similarity index 81% rename from man/calc_gridmet.Rd rename to man/calculate_gridmet.Rd index b7bd2b9a..4d0c1c82 100644 --- a/man/calc_gridmet.Rd +++ b/man/calculate_gridmet.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_gridmet} -\alias{calc_gridmet} +\name{calculate_gridmet} +\alias{calculate_gridmet} \title{Calculate gridMET covariates} \usage{ -calc_gridmet( +calculate_gridmet( from, locs, locs_id = NULL, @@ -28,9 +28,9 @@ containing identifier for each unique coordinate location.} \item{fun}{character(1). Function used to summarize multiple raster cells within sites location buffer (Default = \code{mean}).} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Placeholders.} } @@ -47,7 +47,7 @@ column name reflects the gridMET variable and circular buffer radius. ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_gridmet( +calculate_gridmet( from = gridmet, # derived from process_gridmet() example locs = loc, locs_id = "id", diff --git a/man/calc_hms.Rd b/man/calculate_hms.Rd similarity index 78% rename from man/calc_hms.Rd rename to man/calculate_hms.Rd index f594979b..4b53ca65 100644 --- a/man/calc_hms.Rd +++ b/man/calculate_hms.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_hms} -\alias{calc_hms} +\name{calculate_hms} +\alias{calculate_hms} \title{Calculate wildfire smoke covariates} \usage{ -calc_hms(from, locs, locs_id = NULL, radius = 0, geom = FALSE, ...) +calculate_hms(from, locs, locs_id = NULL, radius = 0, geom = FALSE, ...) } \arguments{ \item{from}{SpatVector(1). Output of \code{process_hms()}.} @@ -17,9 +17,9 @@ containing identifier for each unique coordinate location.} \item{radius}{integer(1). Circular buffer distance around site locations. (Default = 0).} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? 
+Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Placeholders.} } @@ -37,7 +37,7 @@ covered by wildfire smoke plume; 1 = point covered by wildfire smoke plume). ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_hms( +calculate_hms( from = hms, # derived from process_hms() example locs = loc, locs_id = "id", diff --git a/man/calc_koppen_geiger.Rd b/man/calculate_koppen_geiger.Rd similarity index 80% rename from man/calc_koppen_geiger.Rd rename to man/calculate_koppen_geiger.Rd index de7e370d..9ffbb6bd 100644 --- a/man/calc_koppen_geiger.Rd +++ b/man/calculate_koppen_geiger.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_koppen_geiger} -\alias{calc_koppen_geiger} +\name{calculate_koppen_geiger} +\alias{calculate_koppen_geiger} \title{Calculate climate classification covariates} \usage{ -calc_koppen_geiger( +calculate_koppen_geiger( from = NULL, locs = NULL, locs_id = "site_id", @@ -20,9 +20,9 @@ a unique identifier field named \code{locs_id}} \item{locs_id}{character(1). Name of unique identifier.} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Placeholders.} } @@ -46,7 +46,7 @@ dataset. For more information, see ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_koppen_geiger( +calculate_koppen_geiger( from = kg, # derived from process_koppen_geiger() example locs = loc, locs_id = "id", diff --git a/man/calc_lagged.Rd b/man/calculate_lagged.Rd similarity index 79% rename from man/calc_lagged.Rd rename to man/calculate_lagged.Rd index 662394c3..bd4c602f 100644 --- a/man/calc_lagged.Rd +++ b/man/calculate_lagged.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_lagged} -\alias{calc_lagged} +\name{calculate_lagged} +\alias{calculate_lagged} \title{Calculate temporally lagged covariates} \usage{ -calc_lagged(from, date, lag, locs_id, time_id = "time", geom = FALSE) +calculate_lagged(from, date, lag, locs_id, time_id = "time", geom = FALSE) } \arguments{ \item{from}{data.frame(1). A \code{data.frame} containing calculated covariates @@ -27,7 +27,7 @@ that of \code{from.} To return as a \code{SpatVector}, \code{from} must also be a \code{data.frame} object } \description{ -The \code{calc_lagged()} function calculates daily temporal lagged covariates +The \code{calculate_lagged()} function calculates daily temporal lagged covariates from the output of \code{calculate_covariates()} or \code{calc_*()}. } \note{ @@ -35,9 +35,9 @@ In order to calculate temporally lagged covariates, \code{from} must contain at least the number of lag days before the desired start date. For example, if \verb{date = c("2024-01-01", "2024-01-31)} and \code{lag = 1}, \code{from} must contain data starting at 2023-12-31. 
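The note above implies that the covariate input must extend at least `lag` days before the requested start date; a quick standalone check of that requirement:

```r
# For date = c("2024-01-01", "2024-01-31") and lag = 1, the covariate
# table must begin no later than 2023-12-31.
lag <- 1
date <- c("2024-01-01", "2024-01-31")

required_start <- as.Date(date[1]) - lag
required_start
#> [1] "2023-12-31"

# A hypothetical covariate table starting on the requested date is too late.
as.Date("2024-01-01") <= required_start
#> [1] FALSE
```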
-If \code{from} contains geometry features, \code{calc_lagged} will return a column +If \code{from} contains geometry features, \code{calculate_lagged} will return a column with geometry features of the same name. -\code{calc_lagged()} assumes that all columns other than \code{time_id}, +\code{calculate_lagged()} assumes that all columns other than \code{time_id}, \code{locs_id}, and fixed columns of "lat" and "lon", follow the genre, variable, lag, buffer radius format adopted in \code{calc_setcolumns()}. } @@ -46,7 +46,7 @@ lag, buffer radius format adopted in \code{calc_setcolumns()}. ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -terracliamte_covar <- calc_terraclimate( +terracliamte_covar <- calculate_terraclimate( from = terraclimate, # derived from process_terraclimate() example locs = loc, locs_id = "id", @@ -54,7 +54,7 @@ terracliamte_covar <- calc_terraclimate( fun = "mean", geom = FALSE ) -calc_lagged( +calculate_lagged( from = terracliamte_covar, locs_id = "id", date = c("2023-01-02", "2023-01-10"), @@ -64,5 +64,5 @@ calc_lagged( } } \seealso{ -\code{\link[=calc_covariates]{calc_covariates()}} +\code{\link[=calculate_covariates]{calculate_covariates()}} } diff --git a/man/calc_merra2.Rd b/man/calculate_merra2.Rd similarity index 78% rename from man/calc_merra2.Rd rename to man/calculate_merra2.Rd index 422dcc87..4b170822 100644 --- a/man/calc_merra2.Rd +++ b/man/calculate_merra2.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_merra2} -\alias{calc_merra2} +\name{calculate_merra2} +\alias{calculate_merra2} \title{Calculate meteorological and atmospheric covariates} \usage{ -calc_merra2( +calculate_merra2( from, locs, locs_id = NULL, @@ -28,9 +28,9 @@ containing identifier for each unique coordinate location.} \item{fun}{character(1). Function used to summarize multiple raster cells within sites location buffer (Default = \code{mean}).} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Placeholders} } @@ -48,7 +48,7 @@ name reflects variable and circular buffer radius. ## amount of data which is not included in the package. 
\dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_merra2( +calculate_merra2( from = merra2, # derived from process_merra2() example locs = loc, locs_id = "id", @@ -59,7 +59,7 @@ calc_merra2( } } \seealso{ -\code{\link[=calc_geos]{calc_geos()}}, \code{\link[=process_merra2]{process_merra2()}} +\code{\link[=calculate_geos]{calculate_geos()}}, \code{\link[=process_merra2]{process_merra2()}} } \author{ Mitchell Manware diff --git a/man/calc_modis_daily.Rd b/man/calculate_modis_daily.Rd similarity index 85% rename from man/calc_modis_daily.Rd rename to man/calculate_modis_daily.Rd index 33ee98cc..31816704 100644 --- a/man/calc_modis_daily.Rd +++ b/man/calculate_modis_daily.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_modis_daily} -\alias{calc_modis_daily} +\name{calculate_modis_daily} +\alias{calculate_modis_daily} \title{A single-date MODIS worker for parallelization} \usage{ -calc_modis_daily( +calculate_modis_daily( from = NULL, locs = NULL, locs_id = "site_id", @@ -40,9 +40,9 @@ for details.} Higher values will expedite processing, but will increase memory usage. Maximum possible value is \code{2^31 - 1}.} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.} See \code{\link[exactextractr:exact_extract]{exactextractr::exact_extract}} for details.} \item{...}{Placeholders.} @@ -65,7 +65,7 @@ the file names at users' discretion. ## amount of data which is not included in the package. \dontrun{ locs <- data.frame(lon = -78.8277, lat = 35.95013, id = "001") -calc_modis_daily( +calculate_modis_daily( from = mod06l2_warp, # dervied from process_modis() example locs = locs, locs_id = "id", @@ -81,7 +81,7 @@ calc_modis_daily( \itemize{ \item Preprocessing: \code{\link[=process_modis_merge]{process_modis_merge()}}, \code{\link[=process_modis_swath]{process_modis_swath()}}, \code{\link[=process_blackmarble]{process_blackmarble()}} -\item Parallelization: \code{\link[=calc_modis_par]{calc_modis_par()}} +\item Parallelization: \code{\link[=calculate_modis_par]{calculate_modis_par()}} } } \author{ diff --git a/man/calc_modis_par.Rd b/man/calculate_modis_par.Rd similarity index 92% rename from man/calc_modis_par.Rd rename to man/calculate_modis_par.Rd index 4c190ebb..04e93949 100644 --- a/man/calc_modis_par.Rd +++ b/man/calculate_modis_par.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_modis_par} -\alias{calc_modis_par} +\name{calculate_modis_par} +\alias{calculate_modis_par} \title{Calculate MODIS product covariates in multiple CPU threads} \usage{ -calc_modis_par( +calculate_modis_par( from = NULL, locs = NULL, locs_id = "site_id", @@ -63,9 +63,9 @@ Higher values will expedite processing, but will increase memory usage. Maximum possible value is \code{2^31 - 1}. See \code{\link[exactextractr:exact_extract]{exactextractr::exact_extract}} for details.} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. 
The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Arguments passed to \code{preprocess}.} } @@ -78,7 +78,7 @@ not the dates without available tiles. } } \description{ -\code{calc_modis_par} essentially runs \code{\link{calc_modis_daily}} function +\code{calculate_modis_par} essentially runs \code{\link{calculate_modis_daily}} function in each thread (subprocess). Based on daily resolution, each day's workload will be distributed to each thread. With \code{product} argument, the files are processed by a customized function where the unique structure @@ -120,7 +120,7 @@ insufficient tiles. \dontrun{ locs <- data.frame(lon = -78.8277, lat = 35.95013, id = "001") locs <- terra::vect(locs, geom = c("lon", "lat"), crs = "EPSG:4326") -calc_modis_par( +calculate_modis_par( from = list.files("./data", pattern = "VNP46A2.", full.names = TRUE), locs = locs, @@ -146,7 +146,7 @@ See details for setting parallelization: This function leverages the calculation of single-day MODIS covariates: \itemize{ -\item \code{\link[=calc_modis_daily]{calc_modis_daily()}} +\item \code{\link[=calculate_modis_daily]{calculate_modis_daily()}} } Also, for preprocessing, please refer to: diff --git a/man/calc_narr.Rd b/man/calculate_narr.Rd similarity index 82% rename from man/calc_narr.Rd rename to man/calculate_narr.Rd index 176243c0..8c53fde3 100644 --- a/man/calc_narr.Rd +++ b/man/calculate_narr.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_narr} -\alias{calc_narr} +\name{calculate_narr} +\alias{calculate_narr} \title{Calculate meteorological covariates} \usage{ -calc_narr( +calculate_narr( from, locs, locs_id = NULL, @@ -28,9 +28,9 @@ containing identifier for each unique coordinate location.} \item{fun}{character(1). Function used to summarize multiple raster cells within sites location buffer (Default = \code{mean}).} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Placeholders} } @@ -48,7 +48,7 @@ variable and circular buffer radius. ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_narr( +calculate_narr( from = narr, # derived from process_narr() example locs = loc, locs_id = "id", diff --git a/man/calc_nei.Rd b/man/calculate_nei.Rd similarity index 70% rename from man/calc_nei.Rd rename to man/calculate_nei.Rd index b20cc13b..985ba6c0 100644 --- a/man/calc_nei.Rd +++ b/man/calculate_nei.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_nei} -\alias{calc_nei} +\name{calculate_nei} +\alias{calculate_nei} \title{Calculate road emissions covariates} \usage{ -calc_nei(from = NULL, locs = NULL, locs_id = "site_id", geom = FALSE, ...) +calculate_nei(from = NULL, locs = NULL, locs_id = "site_id", geom = FALSE, ...) 
} \arguments{ \item{from}{SpatVector(1). Output of \code{process_nei()}.} @@ -14,9 +14,9 @@ calc_nei(from = NULL, locs = NULL, locs_id = "site_id", geom = FALSE, ...) \item{locs_id}{character(1). Unique site identifier column name. Unused but kept for compatibility.} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Placeholders.} } @@ -31,7 +31,7 @@ Calculate road emissions covariates ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_nei( +calculate_nei( from = nei, # derived from process_nei example locs = loc, locs_id = "id" diff --git a/man/calc_nlcd.Rd b/man/calculate_nlcd.Rd similarity index 87% rename from man/calc_nlcd.Rd rename to man/calculate_nlcd.Rd index ab5613a1..feaa10b1 100644 --- a/man/calc_nlcd.Rd +++ b/man/calculate_nlcd.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_nlcd} -\alias{calc_nlcd} +\name{calculate_nlcd} +\alias{calculate_nlcd} \title{Calculate land cover covariates} \usage{ -calc_nlcd( +calculate_nlcd( from, locs, locs_id = "site_id", @@ -36,9 +36,9 @@ Maximum possible value is \code{2^31 - 1}. Only valid when \code{mode = "exact"}. See \code{\link[exactextractr:exact_extract]{exactextractr::exact_extract}} for details.} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{nthreads}{integer(1). Number of threads to be used} @@ -66,7 +66,7 @@ with the buffer. ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_nlcd( +calculate_nlcd( from = nlcd, # derived from process_nlcd() example locs = loc, locs_id = "id", diff --git a/man/calc_sedac_groads.Rd b/man/calculate_sedac_groads.Rd similarity index 78% rename from man/calc_sedac_groads.Rd rename to man/calculate_sedac_groads.Rd index 4fcfd686..73164660 100644 --- a/man/calc_sedac_groads.Rd +++ b/man/calculate_sedac_groads.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_sedac_groads} -\alias{calc_sedac_groads} +\name{calculate_sedac_groads} +\alias{calculate_sedac_groads} \title{Calculate roads covariates} \usage{ -calc_sedac_groads( +calculate_sedac_groads( from = NULL, locs = NULL, locs_id = NULL, @@ -28,9 +28,9 @@ containing identifier for each unique coordinate location.} \item{fun}{function(1). Function used to summarize the length of roads within sites location buffer (Default is \code{sum}).} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. 
Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Placeholders.} } @@ -47,14 +47,14 @@ is used to convert the unit of length to meters. \note{ Unit is km / sq km. The returned \code{data.frame} object contains a \verb{$time} column to represent the temporal range covered by the -dataset. For more information, see \url{https://sedac.ciesin.columbia.edu/data/set/groads-global-roads-open-access-v1/metadata}. +dataset. For more information, see \url{https://earthdata.nasa.gov/data/catalog/sedac-ciesin-sedac-groads-v1-1.00}. } \examples{ ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_sedac_groads( +calculate_sedac_groads( from = groads, # derived from process_sedac_groads() example locs = loc, locs_id = "id", diff --git a/man/calc_sedac_population.Rd b/man/calculate_sedac_population.Rd similarity index 81% rename from man/calc_sedac_population.Rd rename to man/calculate_sedac_population.Rd index 08d23956..c09ae2dd 100644 --- a/man/calc_sedac_population.Rd +++ b/man/calculate_sedac_population.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_sedac_population} -\alias{calc_sedac_population} +\name{calculate_sedac_population} +\alias{calculate_sedac_population} \title{Calculate population density covariates} \usage{ -calc_sedac_population( +calculate_sedac_population( from, locs, locs_id = NULL, @@ -28,9 +28,9 @@ containing identifier for each unique coordinate location.} \item{fun}{character(1). Function used to summarize multiple raster cells within sites location buffer (Default = \code{mean}).} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Placeholders} } @@ -48,7 +48,7 @@ spatial resolution of \code{from} and circular buffer radius. ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_sedac_population( +calculate_sedac_population( from = pop, # derived from process_sedac_population() example locs = loc, locs_id = "id", diff --git a/man/calc_temporal_dummies.Rd b/man/calculate_temporal_dummies.Rd similarity index 75% rename from man/calc_temporal_dummies.Rd rename to man/calculate_temporal_dummies.Rd index ab6ee652..45e2d794 100644 --- a/man/calc_temporal_dummies.Rd +++ b/man/calculate_temporal_dummies.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_temporal_dummies} -\alias{calc_temporal_dummies} +\name{calculate_temporal_dummies} +\alias{calculate_temporal_dummies} \title{Calculate temporal dummy covariates} \usage{ -calc_temporal_dummies( +calculate_temporal_dummies( locs, locs_id = "site_id", year = seq(2018L, 2022L), @@ -21,9 +21,9 @@ Default is \code{"site_id"}.} \item{year}{integer. Year domain to dummify. 
Default is \code{seq(2018L, 2022L)}.} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Placeholders.} } @@ -40,7 +40,7 @@ value in \code{year}, and month and day of week binary variables. ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_temporal_dummies( +calculate_temporal_dummies( locs = loc, locs_id = "id", year = seq(2018L, 2022L) diff --git a/man/calc_terraclimate.Rd b/man/calculate_terraclimate.Rd similarity index 83% rename from man/calc_terraclimate.Rd rename to man/calculate_terraclimate.Rd index f251e272..de1016e5 100644 --- a/man/calc_terraclimate.Rd +++ b/man/calculate_terraclimate.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_terraclimate} -\alias{calc_terraclimate} +\name{calculate_terraclimate} +\alias{calculate_terraclimate} \title{Calculate TerraClimate covariates} \usage{ -calc_terraclimate( +calculate_terraclimate( from = NULL, locs = NULL, locs_id = NULL, @@ -28,9 +28,9 @@ containing identifier for each unique coordinate location.} \item{fun}{character(1). Function used to summarize multiple raster cells within sites location buffer (Default = \code{mean}).} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Placeholders.} } @@ -53,7 +53,7 @@ will contain the year and month in YYYYMM format (ie. January, 2018 = ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_terraclimate( +calculate_terraclimate( from = terraclimate, # derived from process_terraclimate() example locs = loc, locs_id = "id", diff --git a/man/calc_tri.Rd b/man/calculate_tri.Rd similarity index 77% rename from man/calc_tri.Rd rename to man/calculate_tri.Rd index 8ea296bb..24508721 100644 --- a/man/calc_tri.Rd +++ b/man/calculate_tri.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_tri} -\alias{calc_tri} +\name{calculate_tri} +\alias{calculate_tri} \title{Calculate toxic release covariates} \usage{ -calc_tri( +calculate_tri( from = NULL, locs, locs_id = "site_id", @@ -24,9 +24,9 @@ Default is \code{"site_id"}.} \item{radius}{Circular buffer radius. Default is \code{c(1000, 10000, 50000)} (meters)} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". 
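The geom argument shared across the calculate_*() family accepts three values, and the return class follows the chosen value. A minimal sketch, assuming the updated amadeus package is installed and mirroring the \dontrun example shown above for calculate_temporal_dummies(); depending on the function, additional columns (for example a time field in locs) may be required.

loc <- data.frame(id = "001", lon = -78.90, lat = 35.97)
# geom = FALSE (default): plain data.frame of covariates
dummies_df <- calculate_temporal_dummies(
  locs = loc, locs_id = "id", year = seq(2018L, 2022L)
)
# geom = "terra": terra::SpatVector carrying the locations' geometry
dummies_vect <- calculate_temporal_dummies(
  locs = loc, locs_id = "id", year = seq(2018L, 2022L), geom = "terra"
)
# geom = "sf": sf object with an additional geometry column
dummies_sf <- calculate_temporal_dummies(
  locs = loc, locs_id = "id", year = seq(2018L, 2022L), geom = "sf"
)
# the former logical form, geom = TRUE, now fails check_geom() with an error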
The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} \item{...}{Placeholders.} } @@ -46,7 +46,7 @@ U.S. context. ## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) -calc_tri( +calculate_tri( from = tri, # derived from process_tri() example locs = loc, locs_id = "id", @@ -55,7 +55,7 @@ calc_tri( } } \seealso{ -\code{\link{calc_sedc}}, \code{\link{process_tri}} +\code{\link{sum_edc}}, \code{\link{process_tri}} } \author{ Insang Song, Mariana Kassien diff --git a/man/check_destfile.Rd b/man/check_destfile.Rd new file mode 100644 index 00000000..92e9ad63 --- /dev/null +++ b/man/check_destfile.Rd @@ -0,0 +1,20 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/download_auxiliary.R +\name{check_destfile} +\alias{check_destfile} +\title{Check if destination file exists or is 0 bytes.} +\usage{ +check_destfile(destfile) +} +\arguments{ +\item{destfile}{character(1). Destination file path.} +} +\value{ +logical(1) +} +\description{ +Check if destination file exists or is 0 bytes. If either condition is +met, the function returns \code{TRUE} to allow the download to proceed. +} +\keyword{auxiliary} +\keyword{internal} diff --git a/man/check_geom.Rd b/man/check_geom.Rd new file mode 100644 index 00000000..76c9d278 --- /dev/null +++ b/man/check_geom.Rd @@ -0,0 +1,23 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/calculate_covariates_auxiliary.R +\name{check_geom} +\alias{check_geom} +\title{Check that \code{geom} value is one of \code{FALSE}, \code{"sf"}, or \code{"terra"}} +\usage{ +check_geom(geom) +} +\arguments{ +\item{geom}{FALSE/"sf"/"terra".'} +} +\value{ +NULL; will stop if \code{geom} is not one of the three options +} +\description{ +Check that \code{geom} value is one of \code{FALSE}, \code{"sf"}, +or \code{"terra"}. +} +\author{ +Mitchell Manware +} +\keyword{auxiliary} +\keyword{internal} diff --git a/man/download_aqs.Rd b/man/download_aqs.Rd index 22f343a6..d276cd9b 100644 --- a/man/download_aqs.Rd +++ b/man/download_aqs.Rd @@ -69,6 +69,7 @@ downloaded files. Default is \code{FALSE}.} The \code{download_aqs()} function accesses and downloads Air Quality System (AQS) data from the \href{https://aqs.epa.gov/aqsweb/airdata/download_files.html}{U.S. Environmental Protection Agency's (EPA) Pre-Generated Data Files}. } \examples{ +\dontrun{ download_aqs( parameter_code = 88101, resolution_temporal = "daily", @@ -80,6 +81,7 @@ download_aqs( unzip = FALSE ) } +} \references{ \insertRef{data_usepa2023airdata}{amadeus} } diff --git a/man/download_cropscape.Rd b/man/download_cropscape.Rd index fc2fe5ae..6c72b10e 100644 --- a/man/download_cropscape.Rd +++ b/man/download_cropscape.Rd @@ -63,6 +63,7 @@ the \href{https://www.nass.usda.gov/Research_and_Science/Cropland/Release/index. 
JSON files should be found at STAC catalog of OpenLandMap } \examples{ +\dontrun{ download_cropscape( year = 2020, source = "USDA", @@ -73,6 +74,7 @@ download_cropscape( unzip = FALSE ) } +} \author{ Insang Song } diff --git a/man/download_data.Rd b/man/download_data.Rd index fd09977a..912901e9 100644 --- a/man/download_data.Rd +++ b/man/download_data.Rd @@ -49,6 +49,7 @@ The \code{download_data()} function accesses and downloads atmospheric, meteorol } } \examples{ +\dontrun{ download_data( dataset_name = "narr", variables = "weasd", @@ -59,6 +60,7 @@ download_data( remove_command = TRUE ) } +} \seealso{ For details of each download function per dataset, Please refer to: diff --git a/man/download_ecoregion.Rd b/man/download_ecoregion.Rd index 52f3266b..8a1f4677 100644 --- a/man/download_ecoregion.Rd +++ b/man/download_ecoregion.Rd @@ -74,6 +74,7 @@ certificate updates in the future. } } \examples{ +\dontrun{ download_ecoregion( directory_to_save = tempdir(), acknowledgement = TRUE, @@ -82,6 +83,7 @@ download_ecoregion( unzip = FALSE ) } +} \references{ \insertRef{article_omernik2014ecoregions}{amadeus} } diff --git a/man/download_geos.Rd b/man/download_geos.Rd index 43fe0b6d..8c67f38c 100644 --- a/man/download_geos.Rd +++ b/man/download_geos.Rd @@ -55,6 +55,7 @@ The \code{download_geos()} function accesses and downloads various atmospheric composition collections from \href{https://gmao.gsfc.nasa.gov/GEOS_systems/}{NASA's Global Earth Observing System (GEOS) model}. } \examples{ +\dontrun{ download_geos( collection = "aqc_tavg_1hr_g1440x721_v1", date = "2024-01-01", @@ -64,6 +65,7 @@ download_geos( remove_command = TRUE ) } +} \references{ \insertRef{keller_description_2021}{amadeus} } diff --git a/man/download_gmted.Rd b/man/download_gmted.Rd index 8a4e43c4..d7da768b 100644 --- a/man/download_gmted.Rd +++ b/man/download_gmted.Rd @@ -64,6 +64,7 @@ Multi-resolution Terrain Elevation Data (GMTED2010) from \href{https://www.usgs.gov/coastal-changes-and-impacts/gmted2010}{U.S. Geological Survey and National Geospatial-Intelligence Agency}. } \examples{ +\dontrun{ download_gmted( statistic = "Breakline Emphasis", resolution = "7.5 arc-seconds", @@ -74,6 +75,7 @@ download_gmted( unzip = FALSE ) } +} \references{ \insertRef{danielson_global_2011}{amadeus} } diff --git a/man/download_gridmet.Rd b/man/download_gridmet.Rd index 9552a3ea..75034d55 100644 --- a/man/download_gridmet.Rd +++ b/man/download_gridmet.Rd @@ -52,6 +52,7 @@ folder within \code{directory_to_save}. The \code{download_gridmet} function accesses and downloads gridded surface meteorological data from the \href{https://www.climatologylab.org/gridmet.html}{University of California Merced Climatology Lab's gridMET dataset}. } \examples{ +\dontrun{ download_gridmet( variables = "Precipitation", year = 2023, @@ -61,6 +62,7 @@ download_gridmet( remove_command = TRUE ) } +} \references{ \insertRef{article_abatzoglou2013development}{amadeus} } diff --git a/man/download_hash.Rd b/man/download_hash.Rd index f9f348f9..08252a71 100644 --- a/man/download_hash.Rd +++ b/man/download_hash.Rd @@ -2,7 +2,7 @@ % Please edit documentation in R/download_auxiliary.R \name{download_hash} \alias{download_hash} -\title{Create has of downloaded files.} +\title{Create hash of downloaded files.} \usage{ download_hash(hash = TRUE, dir = NULL) } @@ -12,10 +12,13 @@ download_hash(hash = TRUE, dir = NULL) \item{dir}{character(1). Directory path.} } \value{ -character(1) \code{rlang::hash} of downloaded files. 
+character(1) Combined SHA-1 hash of the files' contents and sizes. } \description{ -Create \code{rlang::hash_file} of the downloaded files. +Create a combined SHA-1 hash based on the contents and sizes of files +in a specified directory. System-specific metadata (e.g. full file paths, +access times, or user information) are not tracked, ensuring the hash +remains consistent across different systems, users, and access times. } \keyword{auxiliary} \keyword{internal} diff --git a/man/download_hms.Rd b/man/download_hms.Rd index cfcf4abd..0352c5c3 100644 --- a/man/download_hms.Rd +++ b/man/download_hms.Rd @@ -65,6 +65,7 @@ The \code{download_hms()} function accesses and downloads wildfire smoke plume coverage data from \href{https://www.ospo.noaa.gov/products/land/hms.html#0}{NOAA's Hazard Mapping System Fire and Smoke Product}. } \examples{ +\dontrun{ download_hms( data_format = "Shapefile", date = "2024-01-01", @@ -75,6 +76,7 @@ download_hms( unzip = FALSE ) } +} \references{ \insertRef{web_HMSabout}{amadeus} } diff --git a/man/download_huc.Rd b/man/download_huc.Rd index 8d26dc91..2734355d 100644 --- a/man/download_huc.Rd +++ b/man/download_huc.Rd @@ -61,6 +61,7 @@ For whom wants to download a specific region, please visit \href{https://www.epa.gov/waterdata/get-nhdplus-national-hydrography-dataset-plus-data#ListofAreas}{Get NHDPlus Data} } \examples{ +\dontrun{ download_huc( region = "Lower48", type = "Seamless", @@ -71,6 +72,7 @@ download_huc( unzip = FALSE ) } +} \references{ \insertRef{data_usgs2023nhd}{amadeus} } diff --git a/man/download_koppen_geiger.Rd b/man/download_koppen_geiger.Rd index 8309bcbc..60ac638f 100644 --- a/man/download_koppen_geiger.Rd +++ b/man/download_koppen_geiger.Rd @@ -65,6 +65,7 @@ Köppen-Geiger climate classification maps at 1-km resolution}(\href{https://www.nature.com/articles/sdata2018214}{link for article}; \href{https://figshare.com/articles/dataset/Present_and_future_K_ppen-Geiger_climate_classification_maps_at_1-km_resolution/6396959/2}{link for data}). } \examples{ +\dontrun{ download_koppen_geiger( data_resolution = "0.0083", time_period = "Present", @@ -75,6 +76,7 @@ download_koppen_geiger( unzip = FALSE ) } +} \references{ \insertRef{article_beck2023koppen}{amadeus} diff --git a/man/download_merra2.Rd b/man/download_merra2.Rd index 0b8f9e27..c51f019e 100644 --- a/man/download_merra2.Rd +++ b/man/download_merra2.Rd @@ -61,6 +61,7 @@ The \code{download_merra2()} function accesses and downloads various meteorological and atmospheric collections from \href{https://gmao.gsfc.nasa.gov/reanalysis/MERRA-2/}{NASA's Modern-Era Retrospective analysis for Research and Applications, Version 2 (MERRA-2) model}. } \examples{ +\dontrun{ download_merra2( collection = "inst1_2d_int_Nx", date = "2024-01-01", @@ -70,6 +71,7 @@ download_merra2( remove_command = TRUE, ) } +} \references{ \insertRef{data_gmao_merra-inst1_2d_asm_Nx}{amadeus} diff --git a/man/download_narr.Rd b/man/download_narr.Rd index 6498c00f..03b66db9 100644 --- a/man/download_narr.Rd +++ b/man/download_narr.Rd @@ -54,6 +54,7 @@ The \code{download_narr} function accesses and downloads daily meteorological da "Pressure levels" variables contain variable values at 29 atmospheric levels, ranging from 1000 hPa to 100 hPa. All pressure levels data will be downloaded for each variable. 
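A rough sketch of the hashing idea described for download_hash() above: hash each file's contents and size, then hash the combination, so the result does not depend on full paths, access times, or user information. The use of the digest package and the exact combination shown here are illustrative assumptions, not the package's internal implementation.

# illustrative only: assumes the 'digest' package is available
hash_directory_sketch <- function(dir) {
  files <- sort(list.files(dir, full.names = TRUE, recursive = TRUE))
  per_file <- vapply(
    files,
    function(f) {
      digest::digest(
        list(
          contents = digest::digest(f, algo = "sha1", file = TRUE), # hash of file contents
          size = file.size(f)                                       # file size in bytes
        ),
        algo = "sha1"
      )
    },
    character(1)
  )
  # combine the per-file hashes into a single SHA-1 fingerprint
  digest::digest(unname(per_file), algo = "sha1")
}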
} \examples{ +\dontrun{ download_narr( variables = c("weasd", "omega"), year = 2023, @@ -63,6 +64,7 @@ download_narr( remove_command = TRUE ) } +} \references{ \insertRef{mesinger_north_2006}{amadeus} } diff --git a/man/download_nei.Rd b/man/download_nei.Rd index ed9d640a..9f12c7d7 100644 --- a/man/download_nei.Rd +++ b/man/download_nei.Rd @@ -75,6 +75,7 @@ certificate updates in the future. } } \examples{ +\dontrun{ download_nei( year = c(2017L, 2020L), directory_to_save = tempdir(), @@ -84,6 +85,7 @@ download_nei( unzip = FALSE ) } +} \references{ \insertRef{web_usepa2024nei}{amadeus} } diff --git a/man/download_nlcd.Rd b/man/download_nlcd.Rd index bf1b0956..c4cd8281 100644 --- a/man/download_nlcd.Rd +++ b/man/download_nlcd.Rd @@ -63,6 +63,7 @@ land cover data from the \href{https://www.mrlc.gov/data}{Multi-Resolution Land Characteristics (MRLC) Consortium's National Land Cover Database (NLCD) products data base}. } \examples{ +\dontrun{ download_nlcd( collection = "Coterminous United States", year = 2021, @@ -73,6 +74,7 @@ download_nlcd( unzip = FALSE ) } +} \references{ \insertRef{dewitz_national_2023}{amadeus}\if{html}{\out{
}} \insertRef{dewitz_national_2024}{amadeus} diff --git a/man/download_prism.Rd b/man/download_prism.Rd index 7cb4265b..ce9404c3 100644 --- a/man/download_prism.Rd +++ b/man/download_prism.Rd @@ -80,6 +80,7 @@ Accesses and downloads Oregon State University's PRISM data from the PRISM Climate Group Web Service } \examples{ +\dontrun{ download_prism( time = "202104", element = "ppt", @@ -91,6 +92,7 @@ download_prism( remove_command = TRUE ) } +} \references{ \insertRef{article_daly2000prism}{amadeus} diff --git a/man/download_sedac_groads.Rd b/man/download_sedac_groads.Rd index ff9987bb..db691d8b 100644 --- a/man/download_sedac_groads.Rd +++ b/man/download_sedac_groads.Rd @@ -59,9 +59,10 @@ respective sub-directories within \code{directory_to_save}. } \description{ The \code{download_sedac_groads()} function accesses and downloads -roads data from \href{https://sedac.ciesin.columbia.edu/data/set/groads-global-roads-open-access-v1/data-download}{NASA's Global Roads Open Access Data Set (gROADS), v1 (1980-2010)}. +roads data from \href{https://earthdata.nasa.gov/data/catalog/sedac-ciesin-sedac-groads-v1-1.00}{NASA's Global Roads Open Access Data Set (gROADS), v1 (1980-2010)}. } \examples{ +\dontrun{ download_sedac_groads( data_region = "Americas", data_format = "Shapefile", @@ -72,6 +73,7 @@ download_sedac_groads( unzip = FALSE ) } +} \references{ \insertRef{data_ciesin2013groads}{amadeus} } diff --git a/man/download_sedac_population.Rd b/man/download_sedac_population.Rd index 3f141c3e..58e17af0 100644 --- a/man/download_sedac_population.Rd +++ b/man/download_sedac_population.Rd @@ -63,9 +63,10 @@ respective sub-directories within \code{directory_to_save}. } \description{ The \code{download_sedac_population()} function accesses and downloads -population density data from \href{https://sedac.ciesin.columbia.edu/data/set/gpw-v4-population-density-adjusted-to-2015-unwpp-country-totals-rev11}{NASA's UN WPP-Adjusted Population Density, v4.11}. +population density data from \href{https://earthdata.nasa.gov/data/catalog/sedac-ciesin-sedac-gpwv4-apdens-wpp-2015-r11-4.11}{NASA's UN WPP-Adjusted Population Density, v4.11}. } \examples{ +\dontrun{ download_sedac_population( data_resolution = "30 second", data_format = "GeoTIFF", @@ -77,6 +78,7 @@ download_sedac_population( unzip = FALSE ) } +} \references{ \insertRef{data_ciesin2017gpwv4}{amadeus} } diff --git a/man/download_terraclimate.Rd b/man/download_terraclimate.Rd index 33b43fe8..56d2a222 100644 --- a/man/download_terraclimate.Rd +++ b/man/download_terraclimate.Rd @@ -51,6 +51,7 @@ folder within \code{directory_to_save}. The \code{download_terraclimate} function accesses and downloads climate and water balance data from the \href{https://www.climatologylab.org/terraclimate.html}{University of California Merced Climatology Lab's TerraClimate dataset}. } \examples{ +\dontrun{ download_terraclimate( variables = "Precipitation", year = 2023, @@ -60,6 +61,7 @@ download_terraclimate( remove_command = TRUE ) } +} \references{ \insertRef{article_abatzoglou2018terraclimate}{amadeus} } diff --git a/man/download_tri.Rd b/man/download_tri.Rd index 37fdfe79..18706237 100644 --- a/man/download_tri.Rd +++ b/man/download_tri.Rd @@ -45,6 +45,7 @@ downloaded files. Default is \code{FALSE}.} The \code{download_tri()} function accesses and downloads toxic release data from the \href{https://www.epa.gov/toxics-release-inventory-tri-program/tri-data-action-0}{U.S. Environmental Protection Agency's (EPA) Toxic Release Inventory (TRI) Program}. 
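The download_* functions above only queue a file for download when it is missing or empty, as the NARR tests later in this patch exercise; the new check_destfile() helper documents that rule. A minimal sketch of the documented behavior (the internal implementation may differ):

check_destfile_sketch <- function(destfile) {
  # TRUE allows the download to proceed: the file is absent or zero bytes
  !file.exists(destfile) || file.size(destfile) == 0
}
check_destfile_sketch("soilm/soilm.2013.nc") # TRUE for a missing or empty file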
} \examples{ +\dontrun{ download_tri( year = 2021L, directory_to_save = tempdir(), @@ -53,6 +54,7 @@ download_tri( remove_command = TRUE ) } +} \references{ \insertRef{web_usepa2024tri}{amadeus} } diff --git a/man/process_aqs.Rd b/man/process_aqs.Rd index 1c5475cb..a4c66675 100644 --- a/man/process_aqs.Rd +++ b/man/process_aqs.Rd @@ -17,7 +17,7 @@ process_aqs( \arguments{ \item{path}{character(1). Directory path to daily measurement data.} -\item{date}{character(2). Start and end date. +\item{date}{character(1 or 2). Date (1) or start and end dates (2). Should be in \code{"YYYY-MM-DD"} format and sorted.} \item{mode}{character(1). One of diff --git a/man/process_covariates.Rd b/man/process_covariates.Rd index 8d5f3050..738ed8cf 100644 --- a/man/process_covariates.Rd +++ b/man/process_covariates.Rd @@ -39,7 +39,7 @@ data files before passing to \code{process_covariates}}. \dontrun{ process_covariates( covariate = "narr", - date = c("2018-01-01", "2018-01-01"), + date = c("2018-01-01", "2018-01-10"), variable = "weasd", path = system.file("extdata", "examples", "narr", "weasd") ) diff --git a/man/process_geos.Rd b/man/process_geos.Rd index 7666e650..de24324c 100644 --- a/man/process_geos.Rd +++ b/man/process_geos.Rd @@ -5,7 +5,7 @@ \title{Process atmospheric composition data} \usage{ process_geos( - date = c("2018-01-01", "2018-01-01"), + date = c("2018-01-01", "2018-01-10"), variable = NULL, path = NULL, extent = NULL, @@ -13,7 +13,8 @@ process_geos( ) } \arguments{ -\item{date}{character(2). length of 10. Format "YYYY-MM-DD".} +\item{date}{character(1 or 2). Date (1) or start and end dates (2). +Format YYYY-MM-DD (ex. September 1, 2023 = "2023-09-01").} \item{variable}{character(1). GEOS-CF variable name(s).} diff --git a/man/process_gridmet.Rd b/man/process_gridmet.Rd index 63a56a7b..cd20b8b7 100644 --- a/man/process_gridmet.Rd +++ b/man/process_gridmet.Rd @@ -5,7 +5,7 @@ \title{Process gridMET data} \usage{ process_gridmet( - date = c("2023-09-01", "2023-09-01"), + date = c("2023-09-01", "2023-09-10"), variable = NULL, path = NULL, extent = NULL, @@ -13,8 +13,7 @@ process_gridmet( ) } \arguments{ -\item{date}{character(2). length of 10 each. -Start/end date of downloaded data. +\item{date}{character(1 or 2). Date (1) or start and end dates (2). Format YYYY-MM-DD (ex. September 1, 2023 = "2023-09-01").} \item{variable}{character(1). Variable name or acronym code. See \href{https://www.climatologylab.org/wget-gridmet.html}{gridMET Generate Wget File} diff --git a/man/process_hms.Rd b/man/process_hms.Rd index b00283d5..d229328a 100644 --- a/man/process_hms.Rd +++ b/man/process_hms.Rd @@ -4,16 +4,10 @@ \alias{process_hms} \title{Process wildfire smoke data} \usage{ -process_hms( - date = c("2018-01-01", "2018-01-01"), - path = NULL, - extent = NULL, - ... -) +process_hms(date = "2018-01-01", path = NULL, extent = NULL, ...) } \arguments{ -\item{date}{character(2). length of 10 each. -Start/end date of downloaded data. +\item{date}{character(1 or 2). Date (1) or start and end dates (2). Format YYYY-MM-DD (ex. September 1, 2023 = "2023-09-01").} \item{path}{character(1). 
Directory with downloaded NOAA HMS data files.} diff --git a/man/process_merra2.Rd b/man/process_merra2.Rd index e52989db..3d6ca4e1 100644 --- a/man/process_merra2.Rd +++ b/man/process_merra2.Rd @@ -5,7 +5,7 @@ \title{Process meteorological and atmospheric data} \usage{ process_merra2( - date = c("2018-01-01", "2018-01-01"), + date = c("2018-01-01", "2018-01-10"), variable = NULL, path = NULL, extent = NULL, @@ -13,7 +13,8 @@ process_merra2( ) } \arguments{ -\item{date}{character(2). length of 10. Format "YYYY-MM-DD".} +\item{date}{character(1 or 2). Date (1) or start and end dates (2). +Format YYYY-MM-DD (ex. September 1, 2023 = "2023-09-01").} \item{variable}{character(1). MERRA2 variable name(s).} diff --git a/man/process_modis_sds.Rd b/man/process_modis_sds.Rd index a2b21c0e..84b494a0 100644 --- a/man/process_modis_sds.Rd +++ b/man/process_modis_sds.Rd @@ -48,7 +48,7 @@ Name" = MCD12C1.006, then \code{product = "MCD12C1"}. process_modis_sds(product = "MOD09GA") } \seealso{ -\link{calc_modis_par} +\link{calculate_modis_par} } \author{ Insang Song diff --git a/man/process_narr.Rd b/man/process_narr.Rd index d5e1848c..4c7a87d7 100644 --- a/man/process_narr.Rd +++ b/man/process_narr.Rd @@ -5,7 +5,7 @@ \title{Process meteorological data} \usage{ process_narr( - date = c("2023-09-01", "2023-09-01"), + date = "2023-09-01", variable = NULL, path = NULL, extent = NULL, @@ -13,8 +13,7 @@ process_narr( ) } \arguments{ -\item{date}{character(2). length of 10 each. -Start/end date of downloaded data. +\item{date}{character(1 or 2). Date (1) or start and end dates (2). Format YYYY-MM-DD (ex. September 1, 2023 = "2023-09-01").} \item{variable}{character(1). Variable name acronym. See \href{https://ftp.cpc.ncep.noaa.gov/NARR/fixed/merged_land_AWIP32corrected.pdf}{List of Variables in NARR Files} @@ -43,7 +42,7 @@ pressure level, and date. ## amount of data which is not included in the package. \dontrun{ process_narr( - date = c("2018-01-01", "2018-01-01"), + date = c("2018-01-01", "2018-01-10"), variable = "weasd", path = "./tests/testdata/narr/weasd" ) diff --git a/man/process_sedac_groads.Rd b/man/process_sedac_groads.Rd index 9ead4008..d811e036 100644 --- a/man/process_sedac_groads.Rd +++ b/man/process_sedac_groads.Rd @@ -24,7 +24,7 @@ returning a single \code{SpatVector} object. \note{ U.S. context. The returned \code{SpatVector} object contains a \verb{$description} column to represent the temporal range covered by the -dataset. For more information, see \url{https://sedac.ciesin.columbia.edu/data/set/groads-global-roads-open-access-v1/metadata}. +dataset. For more information, see \url{https://earthdata.nasa.gov/data/catalog/sedac-ciesin-sedac-groads-v1-1.00}. } \examples{ ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large diff --git a/man/process_terraclimate.Rd b/man/process_terraclimate.Rd index da289314..aaf31048 100644 --- a/man/process_terraclimate.Rd +++ b/man/process_terraclimate.Rd @@ -5,7 +5,7 @@ \title{Process TerraClimate data} \usage{ process_terraclimate( - date = c("2023-09-01", "2023-09-01"), + date = c("2023-09-01", "2023-09-10"), variable = NULL, path = NULL, extent = NULL, @@ -13,8 +13,7 @@ process_terraclimate( ) } \arguments{ -\item{date}{character(2). length of 10 each. -Start/end date of downloaded data. +\item{date}{character(1 or 2). Date (1) or start and end dates (2). Format YYYY-MM-DD (ex. September 1, 2023 = "2023-09-01").} \item{variable}{character(1). Variable name or acronym code. 
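Several process_*() readers now take either a single date or a start/end pair through the same date argument; longer vectors are rejected, as the updated process_aqs() tests show. A hedged sketch reusing the documented process_narr() example, which assumes the corresponding test data have already been downloaded to the path shown:

# single day
weasd_day <- process_narr(
  date = "2018-01-01",
  variable = "weasd",
  path = "./tests/testdata/narr/weasd"
)
# inclusive start/end range
weasd_range <- process_narr(
  date = c("2018-01-01", "2018-01-10"),
  variable = "weasd",
  path = "./tests/testdata/narr/weasd"
)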
See \href{https://climate.northwestknowledge.net/TERRACLIMATE/index_directDownloads.php}{TerraClimate Direct Downloads} diff --git a/man/process_tri.Rd b/man/process_tri.Rd index 78b52313..b7e3f2ff 100644 --- a/man/process_tri.Rd +++ b/man/process_tri.Rd @@ -33,7 +33,7 @@ This function imports and cleans raw toxic release data, returning a single \code{SpatVector} (points) object for the selected \code{year}. } \note{ -Visit \href{https://www.epa.gov/toxics-release-inventory-tri-program/tri-data-and-tools}{TRI Data and Tools} +Visit \href{https://www.epa.gov/toxics-release-inventory-tri-program/tri-toolbox}{TRI Data and Tools} to view the available years and variables. } \examples{ @@ -48,7 +48,7 @@ tri <- process_tri( } } \references{ -https://www.epa.gov/toxics-release-inventory-tri-program/tri-data-and-tools +https://www.epa.gov/toxics-release-inventory-tri-program/tri-toolbox } \author{ Insang Song, Mariana Kassien diff --git a/man/calc_sedc.Rd b/man/sum_edc.Rd similarity index 76% rename from man/calc_sedc.Rd rename to man/sum_edc.Rd index 14ee8b8c..a36b09db 100644 --- a/man/calc_sedc.Rd +++ b/man/sum_edc.Rd @@ -1,10 +1,10 @@ % Generated by roxygen2: do not edit by hand % Please edit documentation in R/calculate_covariates.R -\name{calc_sedc} -\alias{calc_sedc} +\name{sum_edc} +\alias{sum_edc} \title{Calculate Sum of Exponentially Decaying Contributions (SEDC) covariates} \usage{ -calc_sedc( +sum_edc( from = NULL, locs = NULL, locs_id = NULL, @@ -14,10 +14,11 @@ calc_sedc( ) } \arguments{ -\item{from}{\code{SpatVector} object. Locations where each SEDC is calculated.} +\item{from}{\code{SpatVector}(1). Point locations which contain point-source +covariate data.} -\item{locs}{\code{SpatVector} object. Locations where -the sum of SEDCs are calculated.} +\item{locs}{sf/SpatVector(1). Locations where the sum of exponentially +decaying contributions are calculated.} \item{locs_id}{character(1). Name of the unique id field in \code{point_to}.} @@ -27,9 +28,9 @@ Distance at which the source concentration is reduced to \item{target_fields}{character(varying). Field names in characters.} -\item{geom}{logical(1). Should the function return a \code{SpatVector}? -Default is \code{FALSE}. The coordinate reference system of the \code{SpatVector} is -that of \code{from.}} +\item{geom}{FALSE/"sf"/"terra".. Should the function return with geometry? +Default is \code{FALSE}, options with geometry are "sf" or "terra". The +coordinate reference system of the \code{sf} or \code{SpatVector} is that of \code{from.}} } \value{ a data.frame (tibble) or SpatVector object with input field names with @@ -47,15 +48,13 @@ Calculate Sum of Exponentially Decaying Contributions (SEDC) covariates } \note{ The function is originally from -\href{https://github.com/NIEHS/chopin}{chopin} +\href{https://github.com/ropensci/chopin}{chopin} Distance calculation is done with terra functions internally. Thus, the function internally converts sf objects in \code{point_*} arguments to terra. The threshold should be carefully chosen by users. 
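For intuition on sum_edc(), a sketch of the exponential decay applied to each source value before summation. The decay constant shown here assumes the usual Messier, Akita, and Serre (2012) convention of reducing a source's contribution to roughly exp(-3), about 5 percent, at sedc_bandwidth; the constant used internally may differ.

# weight applied to a source value at a given distance (illustrative)
sedc_weight <- function(distance_m, sedc_bandwidth_m) {
  exp(-3 * distance_m / sedc_bandwidth_m)
}
sedc_weight(c(0, 5000, 10000), sedc_bandwidth_m = 10000)
# ~ 1.000 0.223 0.050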
} \examples{ -library(terra) -library(sf) set.seed(101) ncpath <- system.file("gpkg/nc.gpkg", package = "sf") nc <- terra::vect(ncpath) @@ -69,7 +68,7 @@ pnt_from$val1 <- rgamma(10L, 1, 0.05) pnt_from$val2 <- rgamma(10L, 2, 1) vals <- c("val1", "val2") -calc_sedc(pnt_locs, pnt_from, "NAME", 1e4, vals) +sum_edc(pnt_locs, pnt_from, "NAME", 1e4, vals) } \references{ \insertRef{messier2012integrating}{amadeus} diff --git a/tests/testthat/test-aqs.R b/tests/testthat/test-aqs.R index 57801bdc..5032485d 100644 --- a/tests/testthat/test-aqs.R +++ b/tests/testthat/test-aqs.R @@ -223,11 +223,21 @@ testthat::test_that("process_aqs", { return_format = "data.table" ) ) + testthat::expect_no_error( + aqslddsd <- process_aqs( + path = aqssub, + date = "2022-02-04", + mode = "location", + data_field = "Arithmetic.Mean", + return_format = "data.table" + ) + ) testthat::expect_s3_class(aqsfd, "data.table") testthat::expect_s3_class(aqssd, "data.table") testthat::expect_s3_class(aqssdd, "data.table") testthat::expect_s3_class(aqsld, "data.table") testthat::expect_s3_class(aqsldd, "data.table") + testthat::expect_s3_class(aqslddsd, "data.table") testthat::expect_no_error( aqssf <- process_aqs( @@ -261,7 +271,9 @@ testthat::test_that("process_aqs", { process_aqs(path = aqssub, date = c("January", "Januar")) ) testthat::expect_error( - process_aqs(path = aqssub, date = c("2021-08-15")) + process_aqs( + path = aqssub, date = c("2021-08-15", "2021-08-16", "2021-08-17") + ) ) testthat::expect_error( process_aqs(path = aqssub, date = NULL) diff --git a/tests/testthat/test-calc.R b/tests/testthat/test-calc.R index 3b950d48..6bf51b1e 100644 --- a/tests/testthat/test-calc.R +++ b/tests/testthat/test-calc.R @@ -1,9 +1,9 @@ ################################################################################ -##### unit and integration tests for calc_covariates and auxiliary functions +##### unit and integration test for calculate_covariates and auxiliary functions ################################################################################ -##### calc_covariates -testthat::test_that("calc_covariates (expected errors)", { +##### calculate_covariates +testthat::test_that("calculate_covariates (expected errors)", { withr::local_package("rlang") withr::local_package("terra") withr::local_package("sf") @@ -21,12 +21,12 @@ testthat::test_that("calc_covariates (expected errors)", { "tri", "nei", "prism", "huc", "cdl") for (cand in candidates) { testthat::expect_error( - calc_covariates(covariate = cand) + calculate_covariates(covariate = cand) ) } }) -testthat::test_that("calc_covariates (no errors)", { +testthat::test_that("calculate_covariates (no errors)", { withr::local_package("rlang") withr::local_package("terra") withr::local_package("sf") @@ -46,7 +46,7 @@ testthat::test_that("calc_covariates (no errors)", { ) testthat::expect_no_error( - tri_c <- calc_covariates( + tri_c <- calculate_covariates( covariate = "tri", from = tri_r, locs = ncpt, @@ -68,14 +68,14 @@ testthat::test_that("calc_covariates (no errors)", { "tri", "nei") for (cand in candidates) { testthat::expect_error( - calc_covariates(covariate = cand) + calculate_covariates(covariate = cand) ) } }) ################################################################################ -##### calc_lagged -testthat::test_that("calc_lagged (geom = FALSE)", { +##### calculate_lagged +testthat::test_that("calculate_lagged (geom = FALSE)", { withr::local_package("terra") withr::local_package("data.table") lags <- c(0, 1, 2) @@ -83,7 +83,7 @@ 
testthat::test_that("calc_lagged (geom = FALSE)", { ncp$site_id <- "3799900018810101" # expect function testthat::expect_true( - is.function(calc_lagged) + is.function(calculate_lagged) ) for (l in seq_along(lags)) { narr <- @@ -99,7 +99,7 @@ testthat::test_that("calc_lagged (geom = FALSE)", { ) ) narr_covariate <- - calc_narr( + calculate_narr( from = narr, locs = ncp, locs_id = "site_id", @@ -115,7 +115,7 @@ testthat::test_that("calc_lagged (geom = FALSE)", { ) # expect identical if lag = 0 if (lags[l] == 0) { - narr_lagged <- calc_lagged( + narr_lagged <- calculate_lagged( from = narr_covariate, date = c("2018-01-01", "2018-01-10"), lag = lags[l], @@ -126,7 +126,7 @@ testthat::test_that("calc_lagged (geom = FALSE)", { } else { # expect error because 2018-01-01 will not have lag data from 2017-12-31 testthat::expect_error( - calc_lagged( + calculate_lagged( from = narr_covariate, date = c("2018-01-01", "2018-01-10"), lag = lags[l], @@ -134,7 +134,7 @@ testthat::test_that("calc_lagged (geom = FALSE)", { time_id = "time" ) ) - narr_lagged <- calc_lagged( + narr_lagged <- calculate_lagged( from = narr_covariate, date = c("2018-01-05", "2018-01-10"), lag = lags[l], @@ -153,14 +153,14 @@ testthat::test_that("calc_lagged (geom = FALSE)", { } }) -testthat::test_that("calc_lagged (geom = TRUE)", { +testthat::test_that("calculate_lagged (geom = 'sf/terra')", { withr::local_package("terra") withr::local_package("data.table") ncp <- data.frame(lon = -78.8277, lat = 35.95013) ncp$site_id <- "3799900018810101" # expect function testthat::expect_true( - is.function(calc_lagged) + is.function(calculate_lagged) ) narr <- process_narr( date = c("2018-01-01", "2018-01-10"), @@ -174,7 +174,7 @@ testthat::test_that("calc_lagged (geom = TRUE)", { ) ) narr_covariate <- - calc_narr( + calculate_narr( from = narr, locs = ncp, locs_id = "site_id", @@ -189,13 +189,23 @@ testthat::test_that("calc_lagged (geom = TRUE)", { locs_id = "site_id" ) - # expect error with geom = TRUE and locs as data.frame + # expect error with geom = "terra" and locs as data.frame testthat::expect_error( - calc_lagged( + calculate_lagged( from = narr_covariate, date = c("2018-01-02", "2018-01-04"), lag = 1, - geom = TRUE + geom = "terra" + ) + ) + + # expect error with geom = "sf" and locs as data.frame + testthat::expect_error( + calculate_lagged( + from = narr_covariate, + date = c("2018-01-02", "2018-01-04"), + lag = 1, + geom = "sf" ) ) }) diff --git a/tests/testthat/test-download.R b/tests/testthat/test-download.R index 8e8a433d..c638511a 100644 --- a/tests/testthat/test-download.R +++ b/tests/testthat/test-download.R @@ -215,16 +215,104 @@ testthat::test_that("download_hash", { testthat::test_that("download_hash (LIVE)", { withr::with_tempdir({ - h <- download_narr( - year = c(2010, 2010), + h_first <- download_narr( + year = 2010, variables = "air.sfc", - directory_to_save = ".", + directory_to_save = "./first", + acknowledgement = TRUE, + download = TRUE, + remove_command = TRUE, + hash = TRUE + ) + testthat::expect_true( + is.character(h_first) + ) + h_second <- download_data( + dataset_name = "narr", + year = 2010, + variables = "air.sfc", + directory_to_save = "./second", + acknowledgement = TRUE, + download = TRUE, + remove_command = TRUE, + hash = TRUE + ) + testthat::expect_true( + is.character(h_second) + ) + testthat::expect_identical(h_first, h_second) + h_third <- download_narr( + year = 2011, + variables = "air.sfc", + directory_to_save = "./third", acknowledgement = TRUE, download = TRUE, - remove_command = TRUE + 
remove_command = TRUE, + hash = TRUE ) testthat::expect_true( - is.character(download_hash(TRUE, ".")) + is.character(h_third) + ) + testthat::expect_false( + identical(h_first, h_third) + ) + }) +}) + +################################################################################ +##### check_destfile +testthat::test_that("check_destfile", { + withr::with_tempdir({ + download_data( + dataset_name = "narr", + year = c(2010, 2011), + variables = "weasd", + directory_to_save = ".", + acknowledgement = TRUE, + download = FALSE, + hash = FALSE, + remove_command = FALSE + ) + c1 <- read_commands(list.files(".", pattern = "narr_2010_2011")) + # expect 2 files to download when files do not exist + testthat::expect_length(c1, 2) + + years <- seq(2013, 2015, 1) + files <- paste0("soilm/soilm.", years, ".nc") + dir.create("soilm") + lapply(files, file.create) + download_data( + dataset_name = "narr", + year = c(2013, 2015), + variables = "soilm", + directory_to_save = ".", + acknowledgement = TRUE, + download = FALSE, + hash = FALSE, + remove_command = FALSE + ) + c2 <- read_commands(list.files(".", pattern = "narr_2013_2015")) + # expect 3 files to download when file size = 0 + testthat::expect_length(c2, 3) + + dir.create("air.sfc") + file.create("air.sfc/air.sfc.2020.nc") + writeLines( + "These lines are to make sure the file size is greater than 0 bytes.", + "air.sfc/air.sfc.2020.nc" + ) + download_data( + dataset_name = "narr", + year = 2020, + variables = "air.sfc", + directory_to_save = ".", + acknowledgement = TRUE, + download = FALSE, + hash = FALSE, + remove_command = FALSE ) + c3 <- readLines(list.files(".", pattern = "narr_2020_2020")) + # expect 0 files to download when file exists and file size > 0 + testthat::expect_length(c3, 0) }) }) diff --git a/tests/testthat/test-dummies.R b/tests/testthat/test-dummies.R index dbe0b84d..6bef6b75 100644 --- a/tests/testthat/test-dummies.R +++ b/tests/testthat/test-dummies.R @@ -2,8 +2,8 @@ ##### unit and integration tests for Temporal Dummy functions ################################################################################ -##### calc_temporal_dummies -testthat::test_that("calc_temporal_dummies (no errors)", { +##### calculate_temporal_dummies +testthat::test_that("calculate_temporal_dummies (no errors)", { site_faux <- data.frame( @@ -14,7 +14,7 @@ testthat::test_that("calc_temporal_dummies (no errors)", { ) testthat::expect_no_error( - dum_res <- calc_temporal_dummies( + dum_res <- calculate_temporal_dummies( locs = site_faux, year = seq(2018L, 2022L) ) @@ -29,43 +29,60 @@ testthat::test_that("calc_temporal_dummies (no errors)", { # with geometry testthat::expect_no_error( - dum_res_geom <- calc_temporal_dummies( + dum_res_terra <- calculate_temporal_dummies( locs = site_faux, year = seq(2018L, 2022L), - geom = TRUE + geom = "terra" ) ) - testthat::expect_s4_class(dum_res_geom, "SpatVector") + testthat::expect_s4_class(dum_res_terra, "SpatVector") + + # with geometry + testthat::expect_no_error( + dum_res_sf <- calculate_temporal_dummies( + locs = site_faux, + year = seq(2018L, 2022L), + geom = "sf" + ) + ) + testthat::expect_true("sf" %in% class(dum_res_sf)) # error cases site_faux_err <- site_faux colnames(site_faux_err)[4] <- "date" testthat::expect_error( - dum_res <- calc_temporal_dummies( + dum_res <- calculate_temporal_dummies( locs = site_faux_err ) ) testthat::expect_error( - dum_res <- calc_temporal_dummies( + dum_res <- calculate_temporal_dummies( locs = as.matrix(site_faux_err) ) ) }) 
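The expected-error tests below pass geom = TRUE, which the calculate_*() family now rejects through check_geom(). A minimal sketch of the documented validation, which returns NULL for a valid value and stops otherwise (the internal implementation may differ):

check_geom_sketch <- function(geom) {
  valid <- isFALSE(geom) ||
    (is.character(geom) && length(geom) == 1 && geom %in% c("sf", "terra"))
  if (!valid) {
    stop("`geom` must be one of FALSE, \"sf\", or \"terra\".")
  }
  invisible(NULL)
}
check_geom_sketch("sf")   # returns NULL invisibly
# check_geom_sketch(TRUE) # errors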
-testthat::test_that("calc_temporal_dummies (expected errors)", { +testthat::test_that("calculate_temporal_dummies (expected errors)", { withr::local_package("terra") ncp <- data.frame(lon = -78.8277, lat = 35.95013) ncp$site_id <- "3799900018810101" testthat::expect_error( - calc_temporal_dummies( + calculate_temporal_dummies( ncp ) ) testthat::expect_error( - calc_temporal_dummies( + calculate_temporal_dummies( terra::vect(ncp) ) ) + testthat::expect_error( + calculate_temporal_dummies( + locs = ncp, + year = seq(2018L, 2022L), + geom = TRUE + ) + ) }) diff --git a/tests/testthat/test-ecoregion.R b/tests/testthat/test-ecoregion.R index cd2b3f6d..e3d75c6d 100644 --- a/tests/testthat/test-ecoregion.R +++ b/tests/testthat/test-ecoregion.R @@ -155,8 +155,8 @@ testthat::test_that("process_ecoregion", { }) ################################################################################ -##### calc_ecoregion -testthat::test_that("calc_ecoregion", { +##### calculate_ecoregion +testthat::test_that("calculate_ecoregion", { withr::local_package("terra") withr::local_package("sf") withr::local_options(list(sf_use_s2 = FALSE)) @@ -182,7 +182,7 @@ testthat::test_that("calc_ecoregion", { ) testthat::expect_no_error( - ecor_res <- calc_ecoregion( + ecor_res <- calculate_ecoregion( from = erras, locs = sf::st_as_sf(site_faux), locs_id = "site_id" @@ -190,7 +190,7 @@ testthat::test_that("calc_ecoregion", { ) testthat::expect_no_error( - ecor_res <- calc_ecoregion( + ecor_res <- calculate_ecoregion( from = erras, locs = site_faux, locs_id = "site_id" @@ -208,18 +208,42 @@ testthat::test_that("calc_ecoregion", { ) testthat::expect_no_error( - ecor_geom <- calc_ecoregion( + ecor_terra <- calculate_ecoregion( from = erras, locs = site_faux, locs_id = "site_id", - geom = TRUE + geom = "terra" ) ) testthat::expect_equal( - ncol(ecor_geom), 4 + ncol(ecor_terra), 4 ) testthat::expect_true( - "SpatVector" %in% class(ecor_geom) + "SpatVector" %in% class(ecor_terra) + ) + + testthat::expect_no_error( + ecor_sf <- calculate_ecoregion( + from = erras, + locs = site_faux, + locs_id = "site_id", + geom = "sf" + ) + ) + testthat::expect_equal( + ncol(ecor_sf), 5 + ) + testthat::expect_true( + "sf" %in% class(ecor_sf) + ) + + testthat::expect_error( + calculate_ecoregion( + from = erras, + locs = site_faux, + locs_id = "site_id", + geom = TRUE + ) ) }) # nolint end diff --git a/tests/testthat/test-geos.R b/tests/testthat/test-geos.R index 2c11242b..82e0b72f 100644 --- a/tests/testthat/test-geos.R +++ b/tests/testthat/test-geos.R @@ -167,6 +167,86 @@ testthat::test_that("process_geos (no errors)", { ) }) +testthat::test_that("process_geos (single date)", { + withr::local_package("terra") + collections <- c( + "a", + "c" + ) + # expect function + expect_true( + is.function(process_geos) + ) + for (c in seq_along(collections)) { + collection <- collections[c] + geos <- + process_geos( + date = "2018-01-01", + variable = "O3", + path = + testthat::test_path( + "..", + "testdata", + "geos", + collection + ) + ) + # expect output is SpatRaster + expect_true( + class(geos)[1] == "SpatRaster" + ) + # expect values + expect_true( + terra::hasValues(geos) + ) + # expect non-null coordinate reference system + expect_false( + terra::crs(geos) == "" + ) + # expect lon and lat dimensions to be > 1 + expect_false( + any(c(0, 1) %in% dim(geos)[1:2]) + ) + # expect non-numeric and non-empty time + expect_false( + any(c("", 0) %in% terra::time(geos)) + ) + # expect time dimension is POSIXt for hourly + expect_true( + "POSIXt" %in% 
class(terra::time(geos)) + ) + # expect seconds in time information + expect_true( + "seconds" %in% terra::timeInfo(geos) + ) + # expect dimensions according to collection + if (collection == "a") { + expect_true( + dim(geos)[3] == 1 + ) + } else if (collection == "c") { + expect_true( + dim(geos)[3] == 5 + ) + } + } + # test with cropping extent + testthat::expect_no_error( + geos_ext <- process_geos( + date = "2018-01-01", + variable = "O3", + path = + testthat::test_path( + "..", + "testdata", + "geos", + "c" + ), + extent = terra::ext(geos) + ) + ) +}) + testthat::test_that("process_geos (expected errors)", { # expect error without variable expect_error( @@ -182,8 +262,8 @@ testthat::test_that("process_geos (expected errors)", { }) ################################################################################ -##### calc_geos -testthat::test_that("calc_geos", { +##### calculate_geos +testthat::test_that("calculate_geos", { withr::local_package("terra") withr::local_package("data.table") collections <- c( @@ -195,7 +275,7 @@ testthat::test_that("calc_geos", { ncp$site_id <- "3799900018810101" # expect function expect_true( - is.function(calc_geos) + is.function(calculate_geos) ) for (c in seq_along(collections)) { collection <- collections[c] @@ -213,7 +293,7 @@ testthat::test_that("calc_geos", { ) ) geos_covariate <- - calc_geos( + calculate_geos( from = geos, locs = data.table::data.table(ncp), locs_id = "site_id", @@ -245,21 +325,50 @@ testthat::test_that("calc_geos", { ) } } - # with included geometry + # with included geometry terra testthat::expect_no_error( - geos_covariate_geom <- calc_geos( + geos_covariate_terra <- calculate_geos( from = geos, locs = ncp, locs_id = "site_id", radius = 0, fun = "mean", - geom = TRUE + geom = "terra" ) ) testthat::expect_equal( - ncol(geos_covariate_geom), 4 + ncol(geos_covariate_terra), 4 ) testthat::expect_true( - "SpatVector" %in% class(geos_covariate_geom) + "SpatVector" %in% class(geos_covariate_terra) + ) + + # with included geometry sf + testthat::expect_no_error( + geos_covariate_sf <- calculate_geos( + from = geos, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = "sf" + ) + ) + testthat::expect_equal( + ncol(geos_covariate_sf), 5 + ) + testthat::expect_true( + "sf" %in% class(geos_covariate_sf) + ) + + testthat::expect_error( + calculate_geos( + from = geos, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = TRUE + ) ) }) diff --git a/tests/testthat/test-gmted.R b/tests/testthat/test-gmted.R index 16a9aa61..d1f8c6d4 100644 --- a/tests/testthat/test-gmted.R +++ b/tests/testthat/test-gmted.R @@ -1,6 +1,7 @@ ################################################################################ ##### unit and integration tests for USGS GMTED functions +################################################################################ ##### download_gmted testthat::test_that("download_gmted", { withr::local_package("httr") @@ -85,8 +86,8 @@ testthat::test_that("download_gmted", { } }) +################################################################################ ##### process_gmted -# test GMTED #### testthat::test_that("process_gmted (no errors)", { withr::local_package("terra") statistics <- c( @@ -200,8 +201,9 @@ testthat::test_that("process_gmted_codes (auxiliary)", { testthat::expect_equal(resoorig, "7.5 arc-seconds") }) -##### calc_gmted -testthat::test_that("calc_gmted", { +################################################################################ +##### download_gmted 
+testthat::test_that("calculate_gmted", { withr::local_package("terra") statistics <- c( "Breakline Emphasis", "Systematic Subsample" @@ -214,7 +216,7 @@ testthat::test_that("calc_gmted", { ncp$site_id <- "3799900018810101" # expect function expect_true( - is.function(calc_gmted) + is.function(calculate_gmted) ) for (s in seq_along(statistics)) { statistic <- statistics[s] @@ -232,7 +234,7 @@ testthat::test_that("calc_gmted", { ) ) gmted_covariate <- - calc_gmted( + calculate_gmted( from = gmted, locs = ncp, locs_id = "site_id", @@ -270,17 +272,41 @@ testthat::test_that("calc_gmted", { ) ) testthat::expect_no_error( - gmted_geom <- calc_gmted( + gmted_terra <- calculate_gmted( gmted, ncp, "site_id", - geom = TRUE + geom = "terra" ) ) testthat::expect_equal( - ncol(gmted_geom), 3 + ncol(gmted_terra), 3 ) testthat::expect_true( - "SpatVector" %in% class(gmted_geom) + "SpatVector" %in% class(gmted_terra) + ) + + testthat::expect_no_error( + gmted_sf <- calculate_gmted( + gmted, + ncp, + "site_id", + geom = "sf" + ) + ) + testthat::expect_equal( + ncol(gmted_sf), 4 + ) + testthat::expect_true( + "sf" %in% class(gmted_sf) + ) + + testthat::expect_error( + calculate_gmted( + gmted, + ncp, + "site_id", + geom = TRUE + ) ) }) diff --git a/tests/testthat/test-gridmet.R b/tests/testthat/test-gridmet.R index d6c65afd..568212a3 100644 --- a/tests/testthat/test-gridmet.R +++ b/tests/testthat/test-gridmet.R @@ -158,6 +158,66 @@ testthat::test_that("process_gridmet", { ) }) +testthat::test_that("process_gridmet (single date)", { + withr::local_package("terra") + variable <- "Precipitation" + # expect function + expect_true( + is.function(process_gridmet) + ) + gridmet <- + process_gridmet( + date = "2018-01-03", + variable = variable, + path = + testthat::test_path( + "..", + "testdata", + "gridmet", + "pr" + ) + ) + # expect output is SpatRaster + expect_true( + class(gridmet)[1] == "SpatRaster" + ) + # expect values + expect_true( + terra::hasValues(gridmet) + ) + # expect non-null coordinate reference system + expect_false( + is.null(terra::crs(gridmet)) + ) + # expect lon and lat dimensions to be > 1 + expect_false( + any(c(0, 1) %in% dim(gridmet)[1:2]) + ) + # expect non-numeric and non-empty time + expect_false( + any(c("", 0) %in% terra::time(gridmet)) + ) + # expect dimensions according to levels + expect_true( + dim(gridmet)[3] == 1 + ) + # test with cropping extent + testthat::expect_no_error( + gridmet_ext <- process_gridmet( + date = "2018-01-03", + variable = "Precipitation", + path = + testthat::test_path( + "..", + "testdata", + "gridmet", + "pr" + ), + extent = terra::ext(gridmet) + ) + ) +}) + testthat::test_that("process_gridmet_codes", { # gridmet gc1 <- process_gridmet_codes("all") @@ -179,8 +239,8 @@ testthat::test_that("process_gridmet_codes", { }) ################################################################################ -##### calc_gridmet -testthat::test_that("calc_gridmet", { +##### calculate_gridmet +testthat::test_that("calculate_gridmet", { withr::local_package("terra") withr::local_package("data.table") radii <- c(0, 1000) @@ -188,7 +248,7 @@ testthat::test_that("calc_gridmet", { ncp$site_id <- "3799900018810101" # expect function expect_true( - is.function(calc_gridmet) + is.function(calculate_gridmet) ) for (r in seq_along(radii)) { gridmet <- @@ -204,7 +264,7 @@ testthat::test_that("calc_gridmet", { ) ) gridmet_covariate <- - calc_gridmet( + calculate_gridmet( from = gridmet, locs = data.table::data.table(ncp), locs_id = "site_id", @@ -235,21 +295,50 @@ 
testthat::test_that("calc_gridmet", { "POSIXt" %in% class(gridmet_covariate$time) ) } - # with included geometry + # with included geometry terra testthat::expect_no_error( - gridmet_covariate_geom <- calc_gridmet( + gridmet_covariate_terra <- calculate_gridmet( from = gridmet, locs = ncp, locs_id = "site_id", radius = 0, fun = "mean", - geom = TRUE + geom = "terra" + ) + ) + testthat::expect_equal( + ncol(gridmet_covariate_terra), 3 + ) + testthat::expect_true( + "SpatVector" %in% class(gridmet_covariate_terra) + ) + + # with included geometry sf + testthat::expect_no_error( + gridmet_covariate_sf <- calculate_gridmet( + from = gridmet, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = "sf" ) ) testthat::expect_equal( - ncol(gridmet_covariate_geom), 3 + ncol(gridmet_covariate_sf), 4 ) testthat::expect_true( - "SpatVector" %in% class(gridmet_covariate_geom) + "sf" %in% class(gridmet_covariate_sf) + ) + + testthat::expect_error( + calculate_gridmet( + from = gridmet, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = TRUE + ) ) }) diff --git a/tests/testthat/test-groads.R b/tests/testthat/test-groads.R index a0bd9df5..ebf27c37 100644 --- a/tests/testthat/test-groads.R +++ b/tests/testthat/test-groads.R @@ -95,8 +95,8 @@ testthat::test_that("process_sedac_groads", { }) ################################################################################ -##### calc_sedac_groads -testthat::test_that("calc_groads", { +##### calculate_sedac_groads +testthat::test_that("calculate_groads", { withr::local_package("terra") withr::local_package("sf") withr::local_options(list(sf_use_s2 = FALSE)) @@ -113,7 +113,7 @@ testthat::test_that("calc_groads", { groads <- terra::vect(path_groads) testthat::expect_no_error( - groads_res <- calc_sedac_groads( + groads_res <- calculate_sedac_groads( from = groads, locs = ncp, locs_id = "site_id", @@ -122,7 +122,7 @@ testthat::test_that("calc_groads", { ) testthat::expect_error( - calc_sedac_groads( + calculate_sedac_groads( from = groads, locs = ncp, locs_id = "site_id", @@ -133,20 +133,47 @@ testthat::test_that("calc_groads", { # expect data.frame testthat::expect_s3_class(groads_res, "data.frame") - # return with geometry + # return with geometry terra testthat::expect_no_error( - groads_geom <- calc_sedac_groads( + groads_terra <- calculate_sedac_groads( from = groads, locs = ncp, locs_id = "site_id", radius = 5000, - geom = TRUE + geom = "terra" + ) + ) + testthat::expect_equal( + ncol(groads_terra), 4 + ) + testthat::expect_true( + "SpatVector" %in% class(groads_terra) + ) + + # return with geometry sf + testthat::expect_no_error( + groads_sf <- calculate_sedac_groads( + from = groads, + locs = ncp, + locs_id = "site_id", + radius = 5000, + geom = "sf" ) ) testthat::expect_equal( - ncol(groads_geom), 4 + ncol(groads_sf), 5 ) testthat::expect_true( - "SpatVector" %in% class(groads_geom) + "sf" %in% class(groads_sf) + ) + + testthat::expect_error( + calculate_sedac_groads( + from = groads, + locs = ncp, + locs_id = "site_id", + radius = 5000, + geom = TRUE + ) ) }) diff --git a/tests/testthat/test-hms.R b/tests/testthat/test-hms.R index 192dca74..90f83eda 100644 --- a/tests/testthat/test-hms.R +++ b/tests/testthat/test-hms.R @@ -176,6 +176,51 @@ testthat::test_that("process_hms (with polygons)", { ) }) +testthat::test_that("process_hms (single date)", { + withr::local_package("terra") + # expect function + testthat::expect_true( + is.function(process_hms) + ) + hms <- + process_hms( + date = "2022-06-10", + path = 
testthat::test_path( + "..", + "testdata", + "hms" + ) + ) + # expect output is a SpatVector or character + testthat::expect_true( + methods::is(hms, "SpatVector") + ) + # expect non-null coordinate reference system + testthat::expect_false( + is.null(terra::crs(hms)) + ) + # expect two columns + testthat::expect_true( + ncol(hms) == 2 + ) + # expect density and date column + testthat::expect_true( + all(c("Density", "Date") %in% names(hms)) + ) + # test with cropping extent + testthat::expect_no_error( + hms_ext <- process_hms( + date = "2022-06-10", + path = testthat::test_path( + "..", + "testdata", + "hms" + ), + extent = terra::ext(hms) + ) + ) +}) + testthat::test_that("process_hms (absent polygons - 12/31/2018)", { withr::local_package("terra") # expect function @@ -196,15 +241,15 @@ testthat::test_that("process_hms (absent polygons - 12/31/2018)", { }) ################################################################################ -##### calc_hms -testthat::test_that("calc_hms (no errors)", { +##### calculate_hms +testthat::test_that("calculate_hms (no errors)", { withr::local_package("terra") radii <- c(0, 1000) ncp <- data.frame(lon = -78.8277, lat = 35.95013) ncp$site_id <- "3799900018810101" # expect function expect_true( - is.function(calc_hms) + is.function(calculate_hms) ) for (r in seq_along(radii)) { hms <- @@ -217,7 +262,7 @@ testthat::test_that("calc_hms (no errors)", { ) ) hms_covariate <- - calc_hms( + calculate_hms( from = hms, locs = ncp, locs_id = "site_id", @@ -254,7 +299,7 @@ testthat::test_that("calc_hms (no errors)", { } }) -testthat::test_that("calc_hms (with geometry)", { +testthat::test_that("calculate_hms (with geometry)", { ncp <- data.frame(lon = -78.8277, lat = 35.95013) ncp$site_id <- "3799900018810101" hms_dir <- testthat::test_path( @@ -264,30 +309,52 @@ testthat::test_that("calc_hms (with geometry)", { date = c("2022-06-10", "2022-06-13"), path = hms_dir ) - hms_covariate_geom <- calc_hms( + hms_covariate_terra <- calculate_hms( from = hms, locs = ncp, locs_id = "site_id", radius = 0, - geom = TRUE + geom = "terra" ) # with geometry will have 5 columns testthat::expect_equal( - ncol(hms_covariate_geom), 5 + ncol(hms_covariate_terra), 5 ) testthat::expect_s4_class( - hms_covariate_geom, "SpatVector" + hms_covariate_terra, "SpatVector" + ) + hms_covariate_sf <- calculate_hms( + from = hms, + locs = ncp, + locs_id = "site_id", + radius = 0, + geom = "sf" + ) + # with geometry will have 6 columns + testthat::expect_equal( + ncol(hms_covariate_sf), 6 + ) + testthat::expect_true("sf" %in% class(hms_covariate_sf)) + + testthat::expect_error( + calculate_hms( + from = hms, + locs = ncp, + locs_id = "site_id", + radius = 0, + geom = TRUE + ) ) }) -testthat::test_that("calc_hms (absent polygons - 12/31/2018)", { +testthat::test_that("calculate_hms (absent polygons - 12/31/2018)", { withr::local_package("terra") radii <- c(0, 1000) ncp <- data.frame(lon = -78.8277, lat = 35.95013) ncp$site_id <- "3799900018810101" # expect function expect_true( - is.function(calc_hms) + is.function(calculate_hms) ) # expect function testthat::expect_true( @@ -303,7 +370,7 @@ testthat::test_that("calc_hms (absent polygons - 12/31/2018)", { ) ) for (r in seq_along(radii)) { - hms_covar <- calc_hms( + hms_covar <- calculate_hms( from = hms, locs = ncp, locs_id = "site_id", @@ -316,12 +383,12 @@ testthat::test_that("calc_hms (absent polygons - 12/31/2018)", { testthat::expect_equal(ncol(hms_covar), 7) } for (r in seq_along(radii)) { - hms_covar <- calc_hms( + hms_covar <- 
calculate_hms( from = hms, locs = ncp, locs_id = "site_id", radius = radii[r], - geom = TRUE + geom = "terra" ) # SpatVector testthat::expect_true(methods::is(hms_covar, "SpatVector")) diff --git a/tests/testthat/test-koppen-geiger.R b/tests/testthat/test-koppen-geiger.R index 3b580acd..a6054182 100644 --- a/tests/testthat/test-koppen-geiger.R +++ b/tests/testthat/test-koppen-geiger.R @@ -87,8 +87,8 @@ testthat::test_that("process_koppen_geiger", { }) ################################################################################ -##### calc_koppen_geiger -testthat::test_that("calc_koppen_geiger", { +##### calculate_koppen_geiger +testthat::test_that("calculate_koppen_geiger", { withr::local_package("terra") withr::local_package("sf") withr::local_options( @@ -109,13 +109,13 @@ testthat::test_that("calc_koppen_geiger", { ) testthat::expect_no_error( - kg_res <- calc_koppen_geiger( + kg_res <- calculate_koppen_geiger( from = kgras, locs = site_faux ) ) testthat::expect_no_error( - kg_res <- calc_koppen_geiger( + kg_res <- calculate_koppen_geiger( from = kgras, locs = sf::st_as_sf(site_faux) ) @@ -126,14 +126,24 @@ testthat::test_that("calc_koppen_geiger", { testthat::expect_equal(ncol(kg_res), 7) # should have only one climate zone testthat::expect_equal(sum(unlist(kg_res[, c(-1, -2)])), 1) - # with included geometry + # with included geometry (terra) testthat::expect_no_error( - kg_geom <- calc_koppen_geiger( + kg_terra <- calculate_koppen_geiger( from = kgras, locs = sf::st_as_sf(site_faux), - geom = TRUE + geom = "terra" ) ) - testthat::expect_equal(ncol(kg_geom), 7) - testthat::expect_true("SpatVector" %in% class(kg_geom)) + testthat::expect_equal(ncol(kg_terra), 7) + testthat::expect_true("SpatVector" %in% class(kg_terra)) + # with included geometry (sf) + testthat::expect_no_error( + kg_sf <- calculate_koppen_geiger( + from = kgras, + locs = sf::st_as_sf(site_faux), + geom = "sf" + ) + ) + testthat::expect_equal(ncol(kg_sf), 8) + testthat::expect_true("sf" %in% class(kg_sf)) }) diff --git a/tests/testthat/test-merra2.R b/tests/testthat/test-merra2.R index 9a640434..81166f23 100644 --- a/tests/testthat/test-merra2.R +++ b/tests/testthat/test-merra2.R @@ -1,6 +1,7 @@ ################################################################################ ##### unit and integration tests for NASA MERRA2 functions +################################################################################ ##### download_merra2 testthat::test_that("download_merra2 (no errors)", { withr::local_package("httr") @@ -94,6 +95,7 @@ testthat::test_that("download_merra2 (expected errors)", { ) }) +################################################################################ ##### process_merra2 testthat::test_that("process_merra2", { withr::local_package("terra") @@ -178,8 +180,92 @@ testthat::test_that("process_merra2", { ) }) -##### calc_merra2 -testthat::test_that("calc_merra2", { +testthat::test_that("process_merra2 (single date)", { + withr::local_package("terra") + #* indicates three dimensional data that has subset to single + #* pressure level for test data set + collection <- c( + "inst1_2d_int_Nx", "inst3_2d_gas_Nx", "inst3_3d_chm_Nv", #* + "inst6_3d_ana_Np", #* + "statD_2d_slv_Nx", "tavg1_2d_chm_Nx", "tavg3_3d_udt_Np" #* + ) + variable <- c( + "CPT", "AODANA", "AIRDENS", #* + "SLP", #* + "HOURNORAIN", "COCL", "DUDTANA" #* + ) + merra2_df <- data.frame(collection, variable) + # expect function + expect_true( + is.function(process_merra2) + ) + for (c in seq_along(merra2_df$collection)) { + merra2 <- + 
process_merra2( + date = "2018-01-01", + variable = merra2_df$variable[c], + path = + testthat::test_path( + "..", + "testdata", + "merra2", + merra2_df$collection[c] + ) + ) + # expect output is SpatRaster + expect_true( + class(merra2)[1] == "SpatRaster" + ) + # expect values + expect_true( + terra::hasValues(merra2) + ) + # expect non-null coordinate reference system + expect_false( + terra::crs(merra2) == "" + ) + # expect lon and lat dimensions to be > 1 + expect_false( + any(c(0, 1) %in% dim(merra2)[1:2]) + ) + # expect non-numeric and non-empty time + expect_false( + any(c("", 0) %in% terra::time(merra2)) + ) + # expect time dimension is POSIXt for hourly + expect_true( + "POSIXt" %in% class(terra::time(merra2)) + ) + # expect seconds in time information + expect_true( + "seconds" %in% terra::timeInfo(merra2) + ) + # expect 2 x 3 x 1 dimensions for single-date test data + expect_true( + all(dim(merra2) == c(2, 3, 1)) + ) + } + class(merra2) + # test with cropping extent + testthat::expect_no_error( + merra2_ext <- process_merra2( + date = "2018-01-01", + variable = "CPT", + path = + testthat::test_path( + "..", + "testdata", + "merra2", + "inst1_2d_int_Nx" + ), + extent = terra::ext(merra2) + ) + ) +}) + +################################################################################ +##### calculate_merra2 +testthat::test_that("calculate_merra2", { withr::local_package("terra") withr::local_package("data.table") #* indicates three dimensional data that has subset to single @@ -199,7 +285,7 @@ testthat::test_that("calc_merra2", { ncp$site_id <- "3799900018810101" # expect function expect_true( - is.function(calc_merra2) + is.function(calculate_merra2) ) for (c in seq_along(collections)) { collection <- collections[c] @@ -218,7 +304,7 @@ testthat::test_that("calc_merra2", { ) ) merra2_covariate <- - calc_merra2( + calculate_merra2( from = merra2, locs = data.table::data.table(ncp), locs_id = "site_id", @@ -261,21 +347,50 @@ testthat::test_that("calc_merra2", { ) } } - # with included geometry + # with included geometry terra testthat::expect_no_error( - merra2_covariate_geom <- calc_merra2( + merra2_covariate_terra <- calculate_merra2( from = merra2, locs = ncp, locs_id = "site_id", radius = 0, fun = "mean", - geom = TRUE + geom = "terra" + ) + ) + testthat::expect_equal( + ncol(merra2_covariate_terra), 4 + ) + testthat::expect_true( + "SpatVector" %in% class(merra2_covariate_terra) + ) + + # with included geometry sf + testthat::expect_no_error( + merra2_covariate_sf <- calculate_merra2( + from = merra2, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = "sf" ) ) testthat::expect_equal( - ncol(merra2_covariate_geom), 4 + ncol(merra2_covariate_sf), 5 ) testthat::expect_true( - "SpatVector" %in% class(merra2_covariate_geom) + "sf" %in% class(merra2_covariate_sf) + ) + + testthat::expect_error( + calculate_merra2( + from = merra2, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = TRUE + ) ) }) diff --git a/tests/testthat/test-modis.R b/tests/testthat/test-modis.R index cb2bc643..2c8d7c77 100644 --- a/tests/testthat/test-modis.R +++ b/tests/testthat/test-modis.R @@ -160,14 +160,10 @@ testthat::test_that("download_modis (MODIS-MOD06L2)", { remove_command = FALSE) # define file path with commands - commands_path <- paste0( + commands_path <- list.files( directory_to_save, - product, - "_", - date_start, - "_", - date_end, - "_wget_commands.txt" + pattern = "_wget_commands.txt", + full.names = TRUE ) # import commands commands <- read_commands(commands_path
= commands_path)[, 2] @@ -635,7 +631,7 @@ testthat::test_that("process_modis (expected errors)", { ################################################################################ ##### calc_modis* -testthat::test_that("calc_modis_par", { +testthat::test_that("calculate_modis_par", { withr::local_package("sf") withr::local_package("terra") withr::local_package("stars") @@ -681,7 +677,7 @@ testthat::test_that("calc_modis_par", { testthat::expect_no_error( suppressWarnings( calc_mod11 <- - calc_modis_par( + calculate_modis_par( from = path_mod11, locs = sf::st_as_sf(site_faux), preprocess = process_modis_merge, @@ -698,7 +694,7 @@ testthat::test_that("calc_modis_par", { testthat::expect_no_error( suppressWarnings( calc_mod11 <- - calc_modis_par( + calculate_modis_par( from = path_mod11, locs = sf::st_as_sf(site_faux), preprocess = process_modis_merge, @@ -711,11 +707,11 @@ testthat::test_that("calc_modis_par", { ) ) - # with geometry + # with geometry terra testthat::expect_no_error( suppressWarnings( - calc_mod11_geom <- - calc_modis_par( + calc_mod11_terra <- + calculate_modis_par( from = path_mod11, locs = sf::st_as_sf(site_faux), preprocess = process_modis_merge, @@ -724,11 +720,45 @@ testthat::test_that("calc_modis_par", { name_covariates = c("MOD_LSTNT_0_", "MOD_LSTDY_0_"), subdataset = "(LST_)", nthreads = 1L, - geom = TRUE + geom = "terra" ) ) ) - testthat::expect_s4_class(calc_mod11_geom, "SpatVector") + testthat::expect_s4_class(calc_mod11_terra, "SpatVector") + + # with geometry sf + testthat::expect_no_error( + suppressWarnings( + calc_mod11_sf <- + calculate_modis_par( + from = path_mod11, + locs = sf::st_as_sf(site_faux), + preprocess = process_modis_merge, + package_list_add = c("MASS"), + export_list_add = c("aux"), + name_covariates = c("MOD_LSTNT_0_", "MOD_LSTDY_0_"), + subdataset = "(LST_)", + nthreads = 1L, + geom = "sf" + ) + ) + ) + testthat::expect_true("sf" %in% class(calc_mod11_sf)) + + # with geometry error + testthat::expect_error( + calculate_modis_par( + from = path_mod11, + locs = sf::st_as_sf(site_faux), + preprocess = process_modis_merge, + package_list_add = c("MASS"), + export_list_add = c("aux"), + name_covariates = c("MOD_LSTNT_0_", "MOD_LSTDY_0_"), + subdataset = "(LST_)", + nthreads = 1L, + geom = TRUE + ) + ) # case 2: swath mod06l2 path_mod06 <- @@ -750,7 +780,7 @@ testthat::test_that("calc_modis_par", { testthat::expect_no_error( suppressWarnings( calc_mod06 <- - calc_modis_par( + calculate_modis_par( from = path_mod06, locs = site_faux, subdataset = c("Cloud_Fraction_Day", "Cloud_Fraction_Night"), @@ -762,22 +792,52 @@ testthat::test_that("calc_modis_par", { ) testthat::expect_s3_class(calc_mod06, "data.frame") - # with geometry + # with geometry terra + testthat::expect_no_error( + suppressWarnings( + calc_mod06_terra <- + calculate_modis_par( + from = path_mod06, + locs = site_faux, + subdataset = c("Cloud_Fraction_Day", "Cloud_Fraction_Night"), + preprocess = process_modis_swath, + name_covariates = c("MOD_CLFRN_0_", "MOD_CLFRD_0_"), + nthreads = 1, + geom = "terra" + ) + ) + ) + testthat::expect_s4_class(calc_mod06_terra, "SpatVector") + + # with geometry sf testthat::expect_no_error( suppressWarnings( - calc_mod06_geom <- - calc_modis_par( + calc_mod06_sf <- + calculate_modis_par( from = path_mod06, locs = site_faux, subdataset = c("Cloud_Fraction_Day", "Cloud_Fraction_Night"), preprocess = process_modis_swath, name_covariates = c("MOD_CLFRN_0_", "MOD_CLFRD_0_"), nthreads = 1, - geom = TRUE + geom = "sf" ) ) ) - 
testthat::expect_s4_class(calc_mod06_geom, "SpatVector") + testthat::expect_true("sf" %in% class(calc_mod06_sf)) + + # with geometry error + testthat::expect_error( + calculate_modis_par( + from = path_mod06, + locs = site_faux, + subdataset = c("Cloud_Fraction_Day", "Cloud_Fraction_Night"), + preprocess = process_modis_swath, + name_covariates = c("MOD_CLFRN_0_", "MOD_CLFRD_0_"), + nthreads = 1, + geom = TRUE + ) + ) # case 3: VIIRS path_vnp46 <- @@ -797,7 +857,7 @@ testthat::test_that("calc_modis_par", { testthat::expect_no_error( suppressWarnings( calc_vnp46 <- - calc_modis_par( + calculate_modis_par( from = path_vnp46, locs = site_faux, preprocess = process_blackmarble, @@ -810,11 +870,11 @@ testthat::test_that("calc_modis_par", { ) testthat::expect_s3_class(calc_vnp46, "data.frame") - # with geometry (as SpatVector) + # with geometry terra testthat::expect_no_error( suppressWarnings( - calc_vnp46_geom_v <- - calc_modis_par( + calc_vnp46_terra <- + calculate_modis_par( from = path_vnp46, locs = site_faux, preprocess = process_blackmarble, @@ -822,18 +882,18 @@ testthat::test_that("calc_modis_par", { subdataset = 3L, nthreads = 1, tile_df = process_blackmarble_corners(c(9, 10), c(5, 5)), - geom = TRUE + geom = "terra" ) ) ) - testthat::expect_s4_class(calc_vnp46_geom_v, "SpatVector") + testthat::expect_s4_class(calc_vnp46_terra, "SpatVector") - # with geometry (as sf) + # with geometry sf testthat::expect_no_error( suppressWarnings( - calc_vnp46_geom_sf <- - calc_modis_par( + calc_vnp46_sf <- + calculate_modis_par( from = path_vnp46, locs = sf::st_as_sf(site_faux), preprocess = process_blackmarble, @@ -841,11 +901,25 @@ testthat::test_that("calc_modis_par", { subdataset = 3L, nthreads = 1, tile_df = process_blackmarble_corners(c(9, 10), c(5, 5)), - geom = TRUE + geom = "sf" ) ) ) - testthat::expect_s4_class(calc_vnp46_geom_sf, "SpatVector") + testthat::expect_true("sf" %in% class(calc_vnp46_sf)) + + # with geometry error + testthat::expect_error( + calculate_modis_par( + from = path_vnp46, + locs = sf::st_as_sf(site_faux), + preprocess = process_blackmarble, + name_covariates = c("MOD_NITLT_0_"), + subdataset = 3L, + nthreads = 1, + tile_df = process_blackmarble_corners(c(9, 10), c(5, 5)), + geom = TRUE + ) + ) # error cases testthat::expect_error( @@ -869,28 +943,28 @@ testthat::test_that("calc_modis_par", { site_faux_r <- site_faux names(site_faux_r)[1] <- "ID" testthat::expect_error( - calc_modis_daily( + calculate_modis_daily( from = rast(nrow = 3, ncol = 3), date = "2021-08-15", locs = site_faux_r ) ) testthat::expect_error( - calc_modis_daily( + calculate_modis_daily( from = rast(nrow = 3, ncol = 3), date = "2021-08-15", locs = matrix(c(1, 3, 4, 5), nrow = 2) ) ) testthat::expect_error( - calc_modis_daily( + calculate_modis_daily( from = rast(nrow = 3, ncol = 3), date = "2021-08-15", locs = sf::st_as_sf(site_faux) ) ) testthat::expect_error( - calc_modis_daily( + calculate_modis_daily( from = terra::rast(nrow = 3, ncol = 3, vals = 1:9, names = "a"), date = "2021-08-15", locs = array(1:12, dim = c(2, 2, 3)) @@ -899,7 +973,7 @@ testthat::test_that("calc_modis_par", { site_faux0 <- site_faux names(site_faux0)[2] <- "date" testthat::expect_error( - calc_modis_daily( + calculate_modis_daily( from = rast(nrow = 3, ncol = 3), date = "2021-08-15", locs = sf::st_as_sf(site_faux0) @@ -921,7 +995,7 @@ testthat::test_that("calc_modis_par", { ) testthat::expect_no_error( - calc_modis_daily( + calculate_modis_daily( from = mcd_merge, date = "2021-08-15", locs = sf::st_as_sf(site_faux2), @@ 
-930,27 +1004,40 @@ testthat::test_that("calc_modis_par", { ) ) - # test calc_modis_daily directly with geometry + # test calculate_modis_daily directly with geometry terra testthat::expect_no_error( - calc_mod_geom <- calc_modis_daily( + calc_mod_terra <- calculate_modis_daily( from = mcd_merge, date = "2021-08-15", locs = sf::st_as_sf(site_faux2), radius = 1000, name_extracted = "MCD_EXTR_1K_", - geom = TRUE + geom = "terra" + ) + ) + testthat::expect_s4_class(calc_mod_terra, "SpatVector") + + # test calculate_modis_daily directly with geometry sf + testthat::expect_no_error( + calc_mod_sf <- calculate_modis_daily( + from = mcd_merge, + date = "2021-08-15", + locs = sf::st_as_sf(site_faux2), + radius = 1000, + name_extracted = "MCD_EXTR_1K_", + geom = "sf" ) ) - testthat::expect_s4_class(calc_mod_geom, "SpatVector") + testthat::expect_true("sf" %in% class(calc_mod_sf)) testthat::expect_error( - calc_modis_par(from = site_faux) + calculate_modis_par(from = site_faux) ) testthat::expect_error( - calc_modis_par(from = path_mod11, product = "MOD11A1", locs = list(1, 2, 3)) + calculate_modis_par(from = path_mod11, product = "MOD11A1", locs = list(1, 2, 3)) ) testthat::expect_error( - calc_modis_par( + calculate_modis_par( from = path_vnp46, locs = site_faux, preprocess = "fountain", @@ -960,7 +1047,7 @@ testthat::test_that("calc_modis_par", { ) ) testthat::expect_warning( - calc_modis_par( + calculate_modis_par( from = path_vnp46, locs = site_faux, preprocess = process_blackmarble, @@ -971,7 +1058,7 @@ testthat::test_that("calc_modis_par", { ) ) testthat::expect_warning( - flushed <- calc_modis_par( + flushed <- calculate_modis_par( from = path_vnp46, locs = site_faux, name_covariates = c("MOD_NITLT_0_"), diff --git a/tests/testthat/test-narr.R b/tests/testthat/test-narr.R index 7414f49b..a46bf4d2 100644 --- a/tests/testthat/test-narr.R +++ b/tests/testthat/test-narr.R @@ -79,6 +79,7 @@ testthat::test_that("narr_variable (expected errors)", { ) }) +################################################################################ ##### process_narr testthat::test_that("process_narr", { withr::local_package("terra") @@ -93,7 +94,79 @@ testthat::test_that("process_narr", { for (v in seq_along(variables)) { narr <- process_narr( - date = c("2018-01-01", "2018-01-01"), + date = c("2018-01-01", "2018-01-05"), + variable = variables[v], + path = + testthat::test_path( + "..", + "testdata", + "narr", + variables[v] + ) + ) + # expect output is SpatRaster + expect_true( + class(narr)[1] == "SpatRaster" + ) + # expect values + expect_true( + terra::hasValues(narr) + ) + # expect non-null coordinate reference system + expect_false( + is.null(terra::crs(narr)) + ) + # expect lon and lat dimensions to be > 1 + expect_false( + any(c(0, 1) %in% dim(narr)[1:2]) + ) + # expect non-numeric and non-empty time + expect_false( + any(c("", 0) %in% terra::time(narr)) + ) + # expect dimensions according to levels + if (variables[v] == "weasd") { + expect_true( + dim(narr)[3] == 5 + ) + } else if (variables[v] == "omega") { + expect_true( + dim(narr)[3] == 145 + ) + } + } + # test with cropping extent + testthat::expect_no_error( + narr_ext <- + process_narr( + date = c("2018-01-01", "2018-01-05"), + variable = "omega", + path = + testthat::test_path( + "..", + "testdata", + "narr", + "omega" + ), + extent = terra::ext(narr) + ) + ) +}) + +testthat::test_that("process_narr (single date)", { + withr::local_package("terra") + variables <- c( + "weasd", + "omega" + ) + # expect function + expect_true( + 
is.function(process_narr) + ) + for (v in seq_along(variables)) { + narr <- + process_narr( + date = "2018-01-01", variable = variables[v], path = testthat::test_path( @@ -138,7 +211,7 @@ testthat::test_that("process_narr", { testthat::expect_no_error( narr_ext <- process_narr( - date = c("2018-01-01", "2018-01-01"), + date = "2018-01-01", variable = "omega", path = testthat::test_path( @@ -152,8 +225,9 @@ testthat::test_that("process_narr", { ) }) -##### calc_narr -testthat::test_that("calc_narr", { +################################################################################ +##### calculate_narr +testthat::test_that("calculate_narr", { withr::local_package("terra") variables <- c( "weasd", @@ -164,14 +238,14 @@ testthat::test_that("calc_narr", { ncp$site_id <- "3799900018810101" # expect function expect_true( - is.function(calc_narr) + is.function(calculate_narr) ) for (v in seq_along(variables)) { variable <- variables[v] for (r in seq_along(radii)) { narr <- process_narr( - date = c("2018-01-01", "2018-01-01"), + date = "2018-01-01", variable = variable, path = testthat::test_path( @@ -182,7 +256,7 @@ testthat::test_that("calc_narr", { ) ) narr_covariate <- - calc_narr( + calculate_narr( from = narr, locs = ncp, locs_id = "site_id", @@ -225,21 +299,49 @@ testthat::test_that("calc_narr", { ) } } - # with geometry + # with geometry terra testthat::expect_no_error( - narr_covariate_geom <- calc_narr( + narr_covariate_terra <- calculate_narr( from = narr, locs = ncp, locs_id = "site_id", radius = 0, fun = "mean", - geom = TRUE + geom = "terra" ) ) testthat::expect_equal( - ncol(narr_covariate_geom), 4 # 4 columns because omega has pressure levels + ncol(narr_covariate_terra), 4 # 4 columns because omega has pressure levels ) testthat::expect_true( - "SpatVector" %in% class(narr_covariate_geom) + "SpatVector" %in% class(narr_covariate_terra) + ) + # with geometry sf + testthat::expect_no_error( + narr_covariate_sf <- calculate_narr( + from = narr, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = "sf" + ) + ) + testthat::expect_equal( + ncol(narr_covariate_sf), 5 # 5 columns because omega has pressure levels + ) + testthat::expect_true( + "sf" %in% class(narr_covariate_sf) + ) + + testthat::expect_error( + calculate_narr( + from = narr, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = TRUE + ) ) }) diff --git a/tests/testthat/test-nei.R b/tests/testthat/test-nei.R index 335c5d6c..fe7c7f8c 100644 --- a/tests/testthat/test-nei.R +++ b/tests/testthat/test-nei.R @@ -169,8 +169,8 @@ testthat::test_that("process_nei", { }) ################################################################################ -##### calc_nei -testthat::test_that("calc_nei", { +##### calculate_nei +testthat::test_that("calculate_nei", { withr::local_package("terra") withr::local_package("sf") withr::local_package("data.table") @@ -225,7 +225,7 @@ testthat::test_that("calc_nei", { process_nei(neipath, nc, year = 2083) ) - # calc_nei + # calculate_nei ncp <- data.frame(lon = -78.8277, lat = 35.95013) ncp$site_id <- "3799900018810101" ncp$time <- 2018L @@ -233,7 +233,7 @@ testthat::test_that("calc_nei", { nc <- terra::project(nc, "EPSG:4326") testthat::expect_no_error( - neicalced <- calc_nei( + neicalced <- calculate_nei( locs = ncp, from = neiras ) @@ -241,23 +241,40 @@ testthat::test_that("calc_nei", { testthat::expect_true(any(grepl("NEI", names(neicalced)))) testthat::expect_equal(neicalced$TRF_NEINP_0_00000, 1579079, tolerance = 1) - # with geometry + # with 
geometry terra testthat::expect_no_error( - neicalced_geom <- calc_nei( + neicalced_terra <- calculate_nei( locs = ncp, from = neiras, - geom = TRUE + geom = "terra" + ) + ) + testthat::expect_s4_class(neicalced_terra, "SpatVector") + + # with geometry sf + testthat::expect_no_error( + neicalced_sf <- calculate_nei( + locs = ncp, + from = neiras, + geom = "sf" ) ) - testthat::expect_s4_class(neicalced_geom, "SpatVector") + testthat::expect_true("sf" %in% class(neicalced_sf)) # more error cases testthat::expect_condition( - calc_nei( + calculate_nei( locs = "jittered", from = neiras ) ) + testthat::expect_error( + calculate_nei( + locs = ncp, + from = neiras, + geom = TRUE + ) + ) }) # nolint end diff --git a/tests/testthat/test-nlcd.R b/tests/testthat/test-nlcd.R index 1e61989b..acbc353e 100644 --- a/tests/testthat/test-nlcd.R +++ b/tests/testthat/test-nlcd.R @@ -117,8 +117,8 @@ testthat::test_that("process_nlcd", { }) ################################################################################ -##### calc_nlcd -testthat::test_that("calc_nlcd", { +##### calculate_nlcd +testthat::test_that("calculate_nlcd", { withr::local_package("terra") withr::local_package("exactextractr") withr::local_package("sf") @@ -149,52 +149,66 @@ testthat::test_that("calc_nlcd", { testthat::expect_s4_class(nlcdras, "SpatRaster") testthat::expect_error( - calc_nlcd(locs = eg_data, - from = nlcdras, - radius = "1000"), + calculate_nlcd( + locs = eg_data, + from = nlcdras, + radius = "1000" + ), "radius is not a numeric." ) testthat::expect_error( - calc_nlcd(locs = eg_data, - from = nlcdras, - mode = "whatnot", - radius = 1000) + calculate_nlcd( + locs = eg_data, + from = nlcdras, + mode = "whatnot", + radius = 1000 + ) ) # -- buf_radius has likely value testthat::expect_error( - calc_nlcd(locs = eg_data, - from = nlcdras, - radius = -3), + calculate_nlcd( + locs = eg_data, + from = nlcdras, + radius = -3 + ), "radius has not a likely value." ) # -- two modes work properly testthat::expect_no_error( - calc_nlcd(locs = sf::st_as_sf(eg_data), - from = nlcdras, - mode = "exact", - radius = 1000) + calculate_nlcd( + locs = sf::st_as_sf(eg_data), + from = nlcdras, + mode = "exact", + radius = 1000 + ) ) testthat::expect_no_error( - calc_nlcd(locs = eg_data, - from = nlcdras, - mode = "terra", - radius = 300) + calculate_nlcd( + locs = eg_data, + from = nlcdras, + mode = "terra", + radius = 300 + ) ) # -- multicore mode works properly testthat::expect_no_error( - calc_nlcd(locs = eg_data, - from = nlcdras, - mode = "exact", - radius = 1000, - nthreads = 2L) + calculate_nlcd( + locs = eg_data, + from = nlcdras, + mode = "exact", + radius = 1000, + nthreads = 2L + ) ) testthat::expect_no_error( - calc_nlcd(locs = eg_data, - from = nlcdras, - mode = "terra", - radius = 1000, - nthreads = 2L) + calculate_nlcd( + locs = eg_data, + from = nlcdras, + mode = "terra", + radius = 1000, + nthreads = 2L + ) ) @@ -215,13 +229,17 @@ testthat::test_that("calc_nlcd", { "NLCD data not available for this year." 
) testthat::expect_error( - calc_nlcd(locs = 12, - locs_id = "site_id", - from = nlcdras) + calculate_nlcd( + locs = 12, + locs_id = "site_id", + from = nlcdras + ) ) testthat::expect_error( - calc_nlcd(locs = eg_data, - from = 12) + calculate_nlcd( + locs = eg_data, + from = 12 + ) ) # -- nlcd_path is not a character testthat::expect_error( @@ -241,14 +259,14 @@ testthat::test_that("calc_nlcd", { year <- 2021 buf_radius <- 3000 testthat::expect_no_error( - calc_nlcd( + calculate_nlcd( locs = eg_data, locs_id = "site_id", from = nlcdras, radius = buf_radius ) ) - output <- calc_nlcd( + output <- calculate_nlcd( locs = eg_data, locs_id = "site_id", radius = buf_radius, @@ -278,18 +296,44 @@ testthat::test_that("calc_nlcd", { testthat::expect_equal( ncol(output), 15 ) - output_geom <- calc_nlcd( + # example with terra output + output_terra <- calculate_nlcd( + locs = eg_data, + locs_id = "site_id", + radius = buf_radius, + from = nlcdras, + geom = "terra" + ) + # with geometry will have 15 columns + testthat::expect_equal( + ncol(output_terra), 15 + ) + testthat::expect_true( + "SpatVector" %in% class(output_terra) + ) + # example with sf output + output_sf <- calculate_nlcd( locs = eg_data, locs_id = "site_id", radius = buf_radius, from = nlcdras, - geom = TRUE + geom = "sf" ) - # with geometry will have 12 columns + # with geometry will have 16 columns testthat::expect_equal( - ncol(output_geom), 15 + ncol(output_sf), 16 ) testthat::expect_true( - "SpatVector" %in% class(output_geom) + "sf" %in% class(output_sf) + ) + # error with TRUE geom + testthat::expect_error( + calculate_nlcd( + locs = eg_data, + locs_id = "site_id", + radius = buf_radius, + from = nlcdras, + geom = TRUE + ) ) }) diff --git a/tests/testthat/test-population.R b/tests/testthat/test-population.R index d8d86548..fc03e0a1 100644 --- a/tests/testthat/test-population.R +++ b/tests/testthat/test-population.R @@ -189,8 +189,8 @@ testthat::test_that("process_sedac_codes", { }) ################################################################################ -##### calc_sedac_population -testthat::test_that("calc_sedac_population", { +##### calculate_sedac_population +testthat::test_that("calculate_sedac_population", { withr::local_package("terra") withr::local_package("data.table") paths <- list.files(testthat::test_path( @@ -201,7 +201,7 @@ testthat::test_that("calc_sedac_population", { ncp$site_id <- "3799900018810101" # expect function expect_true( - is.function(calc_sedac_population) + is.function(calculate_sedac_population) ) for (p in seq_along(paths)) { path <- paths[p] @@ -211,7 +211,7 @@ testthat::test_that("calc_sedac_population", { path = paths ) pop_covariate <- - calc_sedac_population( + calculate_sedac_population( from = pop, locs = data.table::data.table(ncp), locs_id = "site_id", @@ -243,21 +243,50 @@ testthat::test_that("calc_sedac_population", { ) } } - # with included geometry + # with included geometry terra testthat::expect_no_error( - pop_covariate_geom <- calc_sedac_population( + pop_covariate_terra <- calculate_sedac_population( from = pop, locs = ncp, locs_id = "site_id", radius = 0, fun = "mean", - geom = TRUE + geom = "terra" ) ) testthat::expect_equal( - ncol(pop_covariate_geom), 3 + ncol(pop_covariate_terra), 3 ) testthat::expect_true( - "SpatVector" %in% class(pop_covariate_geom) + "SpatVector" %in% class(pop_covariate_terra) + ) + + # with included geometry sf + testthat::expect_no_error( + pop_covariate_sf <- calculate_sedac_population( + from = pop, + locs = ncp, + locs_id = "site_id", + radius 
= 0, + fun = "mean", + geom = "sf" + ) + ) + testthat::expect_equal( + ncol(pop_covariate_sf), 4 + ) + testthat::expect_true( + "sf" %in% class(pop_covariate_sf) + ) + + testthat::expect_error( + calculate_sedac_population( + from = pop, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = TRUE + ) ) }) diff --git a/tests/testthat/test-process.R b/tests/testthat/test-process.R index 443d782e..687c51f8 100644 --- a/tests/testthat/test-process.R +++ b/tests/testthat/test-process.R @@ -193,7 +193,7 @@ testthat::test_that("process_locs_vector", { ) # expect error when missing `lat` or `lon` expect_error( - calc_narr( + calculate_narr( from = narr, locs = subset( ncp, @@ -204,7 +204,7 @@ testthat::test_that("process_locs_vector", { ) # expect error when sites are SpatVector (points) expect_no_error( - calc_narr( + calculate_narr( from = narr, locs = terra::vect( ncp, @@ -216,7 +216,7 @@ testthat::test_that("process_locs_vector", { ) # expect error when sites are SpatVector (polygons) expect_no_error( - calc_narr( + calculate_narr( from = narr, locs = terra::buffer( terra::vect( @@ -231,7 +231,7 @@ testthat::test_that("process_locs_vector", { ) # expect error when sites are sf expect_no_error( - calc_narr( + calculate_narr( from = narr, locs = sf::st_as_sf( ncp, diff --git a/tests/testthat/test-sedc.R b/tests/testthat/test-sedc.R index 92d73fcd..f13de42a 100644 --- a/tests/testthat/test-sedc.R +++ b/tests/testthat/test-sedc.R @@ -2,8 +2,8 @@ ##### unit and integration tests for Sum of Exponential Decay functions ################################################################################ -##### calc_sedc -testthat::test_that("calc_sedc", { +##### sum_edc +testthat::test_that("sum_edc", { withr::local_package("terra") withr::local_package("sf") withr::local_package("dplyr") @@ -27,7 +27,7 @@ testthat::test_that("calc_sedc", { targcols <- grep("FUGITIVE_", names(tri_r), value = TRUE) testthat::expect_no_error( tri_sedc <- - calc_sedc( + sum_edc( locs = ncpt, from = tri_r, locs_id = "site_id", @@ -38,7 +38,7 @@ testthat::test_that("calc_sedc", { testthat::expect_s3_class(tri_sedc, "data.frame") testthat::expect_no_error( - calc_sedc( + sum_edc( locs = sf::st_as_sf(ncpt), from = sf::st_as_sf(tri_r), locs_id = "site_id", @@ -47,9 +47,34 @@ testthat::test_that("calc_sedc", { ) ) - # with geometry + # with geometry terra testthat::expect_no_error( - tri_sedc_geom <- calc_sedc( + tri_sedc_terra <- sum_edc( + locs = ncpt, + from = tri_r, + locs_id = "site_id", + sedc_bandwidth = 30000, + target_fields = targcols, + geom = "terra" + ) + ) + testthat::expect_s4_class(tri_sedc_terra, "SpatVector") + + # with geometry sf + testthat::expect_no_error( + tri_sedc_sf <- sum_edc( + locs = ncpt, + from = tri_r, + locs_id = "site_id", + sedc_bandwidth = 30000, + target_fields = targcols, + geom = "sf" + ) + ) + testthat::expect_true("sf" %in% class(tri_sedc_sf)) + + testthat::expect_error( + sum_edc( locs = ncpt, from = tri_r, locs_id = "site_id", @@ -58,13 +83,12 @@ testthat::test_that("calc_sedc", { geom = TRUE ) ) - testthat::expect_s4_class(tri_sedc_geom, "SpatVector") # warning case: duplicate field names between locs and from ncpta <- ncpt ncpta$YEAR <- 2018 testthat::expect_warning( - calc_sedc( + sum_edc( locs = ncpta, from = sf::st_as_sf(tri_r), locs_id = "site_id", diff --git a/tests/testthat/test-terraclimate.R b/tests/testthat/test-terraclimate.R index ed42fe1f..83bef548 100644 --- a/tests/testthat/test-terraclimate.R +++ b/tests/testthat/test-terraclimate.R @@ -158,6 +158,66 
@@ testthat::test_that("process_terraclimate", { ) }) +testthat::test_that("process_terraclimate (single date)", { + withr::local_package("terra") + variable <- "ppt" + # expect function + expect_true( + is.function(process_terraclimate) + ) + terraclimate <- + process_terraclimate( + date = "2018-01-01", + variable = variable, + path = + testthat::test_path( + "..", + "testdata", + "terraclimate", + "ppt" + ) + ) + # expect output is SpatRaster + expect_true( + class(terraclimate)[1] == "SpatRaster" + ) + # expect values + expect_true( + terra::hasValues(terraclimate) + ) + # expect non-null coordinate reference system + expect_false( + is.null(terra::crs(terraclimate)) + ) + # expect lon and lat dimensions to be > 1 + expect_false( + any(c(0, 1) %in% dim(terraclimate)[1:2]) + ) + # expect non-numeric and non-empty time + expect_false( + any(c("", 0) %in% terra::time(terraclimate)) + ) + # expect dimensions according to levels + expect_true( + dim(terraclimate)[3] == 1 + ) + # test with cropping extent + testthat::expect_no_error( + terraclimate_ext <- process_terraclimate( + date = "2018-01-01", + variable = "ppt", + path = + testthat::test_path( + "..", + "testdata", + "terraclimate", + "ppt" + ), + extent = terra::ext(terraclimate) + ) + ) +}) + testthat::test_that("process_terraclimate_codes", { # terraclimate tc1 <- process_terraclimate_codes("all") @@ -179,9 +239,8 @@ testthat::test_that("process_terraclimate_codes", { }) ################################################################################ -##### calc_terraclimate -## 16. TerraClimate #### -testthat::test_that("calc_terraclimate", { +##### calculate_terraclimate +testthat::test_that("calculate_terraclimate", { withr::local_package("terra") withr::local_package("data.table") radii <- c(0, 1000) @@ -189,7 +248,7 @@ testthat::test_that("calc_terraclimate", { ncp$site_id <- "3799900018810101" # expect function expect_true( - is.function(calc_terraclimate) + is.function(calculate_terraclimate) ) for (r in seq_along(radii)) { terraclimate <- @@ -205,7 +264,7 @@ testthat::test_that("calc_terraclimate", { ) ) terraclimate_covariate <- - calc_terraclimate( + calculate_terraclimate( from = terraclimate, locs = data.table::data.table(ncp), locs_id = "site_id", @@ -236,21 +295,50 @@ testthat::test_that("calc_terraclimate", { nchar(terraclimate_covariate$time)[1] == 6 ) } - # with included geometry + # with included geometry terra testthat::expect_no_error( - terraclimate_covariate_geom <- calc_terraclimate( + terraclimate_covariate_terra <- calculate_terraclimate( from = terraclimate, locs = ncp, locs_id = "site_id", radius = 0, fun = "mean", - geom = TRUE + geom = "terra" + ) + ) + testthat::expect_equal( + ncol(terraclimate_covariate_terra), 3 + ) + testthat::expect_true( + "SpatVector" %in% class(terraclimate_covariate_terra) + ) + + # with included geometry sf + testthat::expect_no_error( + terraclimate_covariate_sf <- calculate_terraclimate( + from = terraclimate, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = "sf" ) ) testthat::expect_equal( - ncol(terraclimate_covariate_geom), 3 + ncol(terraclimate_covariate_sf), 4 ) testthat::expect_true( - "SpatVector" %in% class(terraclimate_covariate_geom) + "sf" %in% class(terraclimate_covariate_sf) + ) + + testthat::expect_error( + calculate_terraclimate( + from = terraclimate, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = TRUE + ) ) }) diff --git a/tests/testthat/test-tri.R b/tests/testthat/test-tri.R index ce981a81..7679ff32 
100644 --- a/tests/testthat/test-tri.R +++ b/tests/testthat/test-tri.R @@ -106,8 +106,8 @@ testthat::test_that("process_tri", { }) ################################################################################ -##### calc_tri -testthat::test_that("calc_tri", { +##### calculate_tri +testthat::test_that("calculate_tri", { withr::local_package("terra") withr::local_package("sf") withr::local_package("dplyr") @@ -130,7 +130,7 @@ testthat::test_that("calc_tri", { testthat::expect_s4_class(tri_r, "SpatVector") testthat::expect_no_error( - tri_c <- calc_tri( + tri_c <- calculate_tri( from = tri_r, locs = ncpt, radius = c(1500L, 50000L) @@ -138,40 +138,60 @@ testthat::test_that("calc_tri", { ) testthat::expect_true(is.data.frame(tri_c)) - # with geometry + # with geometry terra testthat::expect_no_error( - tri_c_geom <- calc_tri( + tri_c_terra <- calculate_tri( + from = tri_r, + locs = ncpt, + radius = c(1500L, 50000L), + geom = "terra" + ) + ) + testthat::expect_s4_class(tri_c_terra, "SpatVector") + + # with geometry sf + testthat::expect_no_error( + tri_c_sf <- calculate_tri( + from = tri_r, + locs = ncpt, + radius = c(1500L, 50000L), + geom = "sf" + ) + ) + testthat::expect_true("sf" %in% class(tri_c_sf)) + + testthat::expect_error( + calculate_tri( from = tri_r, locs = ncpt, radius = c(1500L, 50000L), geom = TRUE ) ) - testthat::expect_s4_class(tri_c_geom, "SpatVector") testthat::expect_no_error( - calc_tri( + calculate_tri( from = tri_r, locs = sf::st_as_sf(ncpt), radius = 50000L ) ) testthat::expect_error( - calc_tri( + calculate_tri( from = tempdir(), locs = ncpt, radius = 50000L ) ) testthat::expect_error( - calc_tri( + calculate_tri( from = paste0(tdir, "/tri/"), locs = ncpt[, 1:2], radius = 50000L ) ) testthat::expect_error( - calc_tri( + calculate_tri( from = paste0(tdir, "/tri/"), locs = ncpt, radius = "As far as the Earth's radius" diff --git a/vignettes/workflow.Rmd b/vignettes/workflow.Rmd index 71b6ccb4..8e597b5d 100644 --- a/vignettes/workflow.Rmd +++ b/vignettes/workflow.Rmd @@ -84,13 +84,13 @@ time : 2021-12-28 to 2022-01-03 UTC\n" ## Calculate covariates -Calculate covariates for North Carolina county boundaries with `calc_covariates`. +Calculate covariates for North Carolina county boundaries with `calculate_covariates`. County boundaries are accessed with the `tigris::counties` function.\insertRef{package_tigris} -`geom = TRUE` will return the covariates as a `SpatVector` object. +`geom = "terra"` will return the covariates as a `SpatVector` object (`geom = "sf"` returns an `sf` object). ```{r, eval = FALSE} library(tigris) -temp_covar <- calc_covariates( +temp_covar <- calculate_covariates( covariate = "narr", from = temp_process, locs = tigris::counties("NC", year = 2021),
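+  # A minimal sketch of the updated `geom` options (illustrative only;
+  # `locs_id = "GEOID"` is an assumed ID column, not taken from this vignette):
+  #   temp_covar_sf <- calculate_covariates(
+  #     covariate = "narr",
+  #     from = temp_process,
+  #     locs = tigris::counties("NC", year = 2021),
+  #     locs_id = "GEOID",
+  #     geom = "sf"          # "terra" returns a SpatVector; geom = TRUE now errors
+  #   )
+  #   class(temp_covar_sf)   # expected to include "sf"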