From f2e0c22beb125bea42d3298109495f7fcff7a9af Mon Sep 17 00:00:00 2001
From: fawda123
Date: Mon, 30 Nov 2015 17:17:00 -0600
Subject: [PATCH] fixes with submission - namespace is correct, examples are shorter

---
 NAMESPACE                | 13 +++++++++++++
 R/NeuralNetTools_gar.R   |  5 ++---
 R/NeuralNetTools_lek.R   |  5 ++---
 R/NeuralNetTools_old.R   |  7 +++----
 R/NeuralNetTools_plot.R  |  6 ++----
 R/NeuralNetTools_utils.R | 10 +++++++---
 cran-comments.md         | 19 +++++++++++++++++++
 man/garson.Rd            |  5 ++---
 man/lekprofile.Rd        |  5 ++---
 man/neuralweights.Rd     |  5 ++---
 man/olden.Rd             |  5 ++---
 man/plotnet.Rd           |  6 ++----
 12 files changed, 58 insertions(+), 33 deletions(-)

diff --git a/NAMESPACE b/NAMESPACE
index 7902b23..148cec0 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -41,3 +41,16 @@ import(nnet)
 import(reshape2)
 import(scales)
 import(tidyr)
+importFrom(graphics,par)
+importFrom(graphics,plot)
+importFrom(graphics,points)
+importFrom(graphics,segments)
+importFrom(graphics,text)
+importFrom(stats,coef)
+importFrom(stats,formula)
+importFrom(stats,kmeans)
+importFrom(stats,model.frame)
+importFrom(stats,predict)
+importFrom(stats,quantile)
+importFrom(stats,terms)
+importFrom(utils,capture.output)
diff --git a/R/NeuralNetTools_gar.R b/R/NeuralNetTools_gar.R
index 9d86262..450c1fc 100644
--- a/R/NeuralNetTools_gar.R
+++ b/R/NeuralNetTools_gar.R
@@ -56,6 +56,7 @@
 #'
 #' garson(mod)
 #'
+#' \dontrun{
 #' ## using RSNNS, no bias layers
 #'
 #' library(RSNNS)
@@ -76,15 +77,12 @@
 #'
 #' ## using caret
 #'
-#' \dontrun{
 #' library(caret)
 #'
 #' mod <- train(Y1 ~ X1 + X2 + X3, method = 'nnet', data = neuraldat, linout = TRUE)
 #'
 #' garson(mod)
 #'
-#' }
-#'
 #' ## modify the plot using ggplot2 syntax
 #' library(ggplot2)
 #'
@@ -95,6 +93,7 @@
 #' scale_y_continuous('Rel. Importance', limits = c(-1, 1)) +
 #' scale_fill_gradientn(colours = cols) +
 #' scale_colour_gradientn(colours = cols)
+#'}
 garson <- function(mod_in, ...) UseMethod('garson')
 
 #' @rdname garson
diff --git a/R/NeuralNetTools_lek.R b/R/NeuralNetTools_lek.R
index f8d8b01..6938203 100644
--- a/R/NeuralNetTools_lek.R
+++ b/R/NeuralNetTools_lek.R
@@ -48,6 +48,7 @@
 #'
 #' lekprofile(mod)
 #'
+#' \dontrun{
 #' ## using RSNNS, no bias layers
 #'
 #' library(RSNNS)
@@ -76,15 +77,12 @@
 #'
 #' ## using caret
 #'
-#' \dontrun{
 #' library(caret)
 #'
 #' mod <- train(Y1 ~ X1 + X2 + X3, method = 'nnet', data = neuraldat, linout = TRUE)
 #'
 #' lekprofile(mod)
 #'
-#' }
-#'
 #' ## group by clusters instead of sequencing by quantiles
 #'
 #' mod <- nnet(Y1 ~ X1 + X2 + X3, data = neuraldat, size = 5)
@@ -98,6 +96,7 @@
 #' names(group_vals) <- names(group_vals)
 #'
 #' lekprofile(mod, group_vals = group_vals, xsel = 'X3')
+#' }
 lekprofile <- function(mod_in, ...) UseMethod('lekprofile')
 
 #' @rdname lekprofile
diff --git a/R/NeuralNetTools_old.R b/R/NeuralNetTools_old.R
index 9ce910d..d9e0536 100644
--- a/R/NeuralNetTools_old.R
+++ b/R/NeuralNetTools_old.R
@@ -55,6 +55,7 @@
 #'
 #' olden(mod)
 #'
+#' \dontrun{
 #' ## View the difference for a model w/ skip layers
 #'
 #' set.seed(123)
@@ -82,16 +83,13 @@
 #' olden(mod)
 #'
 #' ## using caret
-#'
-#' \dontrun{
+#' 
 #' library(caret)
 #'
 #' mod <- train(Y1 ~ X1 + X2 + X3, method = 'nnet', data = neuraldat, linout = TRUE)
 #'
 #' olden(mod)
 #'
-#' }
-#'
 #' ## multiple hidden layers
 #'
 #' x <- neuraldat[, c('X1', 'X2', 'X3')]
@@ -99,6 +97,7 @@
 #' mod <- mlp(x, y, size = c(5, 7, 6), linOut = TRUE)
 #'
 #' olden(mod)
+#' }
 olden <- function(mod_in, ...) UseMethod('olden')
 
 #' @rdname olden
diff --git a/R/NeuralNetTools_plot.R b/R/NeuralNetTools_plot.R
index 13160e2..e8388ef 100644
--- a/R/NeuralNetTools_plot.R
+++ b/R/NeuralNetTools_plot.R
@@ -65,6 +65,7 @@
 #'
 #' plotnet(mod, skip = TRUE)
 #'
+#' \dontrun{
 #' ## using RSNNS, no bias layers
 #'
 #' library(RSNNS)
@@ -75,7 +76,6 @@
 #'
 #' plotnet(mod)
 #'
-#' \dontrun{
 #' # pruned model using code from RSSNS pruning demo
 #' pruneFuncParams <- list(max_pr_error_increase = 10.0, pr_accepted_error = 1.0,
 #' no_of_pr_retrain_cycles = 1000, min_error_to_stop = 0.01, init_matrix_value = 1e-6,
@@ -85,7 +85,6 @@
 #'
 #' plotnet(mod)
 #' plotnet(mod, prune_col = 'lightblue')
-#' }
 #'
 #' ## using neuralnet
 #'
@@ -97,13 +96,11 @@
 #'
 #' ## using caret
 #'
-#' \dontrun{
 #' library(caret)
 #'
 #' mod <- train(Y1 ~ X1 + X2 + X3, method = 'nnet', data = neuraldat, linout = TRUE)
 #'
 #' plotnet(mod)
-#' }
 #'
 #' ## a more complicated network with categorical response
 #' AND <- c(rep(0, 7), 1)
@@ -133,6 +130,7 @@
 #' cols <- colorRampPalette(c('lightgreen', 'darkgreen'))(3)[rank(rel_imp)]
 #'
 #' plotnet(mod, circle_col = list(cols, 'lightblue'))
+#' }
 plotnet <- function(mod_in, ...) UseMethod('plotnet')
 
 #' @rdname plotnet
diff --git a/R/NeuralNetTools_utils.R b/R/NeuralNetTools_utils.R
index 25dd3ca..333ed30 100644
--- a/R/NeuralNetTools_utils.R
+++ b/R/NeuralNetTools_utils.R
@@ -33,6 +33,7 @@
 #'
 #' neuralweights(mod)
 #'
+#' \dontrun{
 #' ## using RSNNS, no bias layers
 #'
 #' library(RSNNS)
@@ -43,7 +44,6 @@
 #'
 #' neuralweights(mod)
 #'
-#' \dontrun{
 #' # pruned model using code from RSSNS pruning demo
 #' pruneFuncParams <- list(max_pr_error_increase = 10.0, pr_accepted_error = 1.0,
 #' no_of_pr_retrain_cycles = 1000, min_error_to_stop = 0.01, init_matrix_value = 1e-6,
@@ -53,8 +53,6 @@
 #'
 #' neuralweights(mod)
 #'
-#' }
-#'
 #' ## using neuralnet
 #'
 #' library(neuralnet)
@@ -62,6 +60,7 @@
 #' mod <- neuralnet(Y1 ~ X1 + X2 + X3, data = neuraldat, hidden = 5)
 #'
 #' neuralweights(mod)
+#' }
 neuralweights <- function(mod_in, ...) UseMethod('neuralweights')
 
 #' @rdname neuralweights
@@ -724,3 +723,8 @@ lekgrps <- function(grps){
 
   return(p)
 }
+
+#' @importFrom graphics par plot points segments text
+#' @importFrom stats coef formula kmeans model.frame predict quantile terms
+#' @importFrom utils capture.output
+NULL
\ No newline at end of file
diff --git a/cran-comments.md b/cran-comments.md
index d03789f..36c863c 100644
--- a/cran-comments.md
+++ b/cran-comments.md
@@ -1,3 +1,22 @@
+## Resubmission
+This is a resubmission. In this version I have removed the NOTE by adding the following to the NAMESPACE:
+
+importFrom(graphics,par)
+importFrom(graphics,plot)
+importFrom(graphics,points)
+importFrom(graphics,segments)
+importFrom(graphics,text)
+importFrom(stats,coef)
+importFrom(stats,formula)
+importFrom(stats,kmeans)
+importFrom(stats,model.frame)
+importFrom(stats,predict)
+importFrom(stats,quantile)
+importFrom(stats,terms)
+importFrom(utils,capture.output)
+
+I have also reduced all examples in the documentation to run in less than five seconds.
+
 ## Test environments
 * local Windows 7 install, R 3.2.2
 * local Windows 7 install, Current r-devel (2015-11-30 r69717)
diff --git a/man/garson.Rd b/man/garson.Rd
index fcf72a9..cb10bf6 100644
--- a/man/garson.Rd
+++ b/man/garson.Rd
@@ -76,6 +76,7 @@ mod <- nnet(Y1 ~ X1 + X2 + X3, data = neuraldat, size = 5)
 
 garson(mod)
 
+\dontrun{
 ## using RSNNS, no bias layers
 
 library(RSNNS)
@@ -96,15 +97,12 @@ garson(mod)
 
 ## using caret
 
-\dontrun{
 library(caret)
 
 mod <- train(Y1 ~ X1 + X2 + X3, method = 'nnet', data = neuraldat, linout = TRUE)
 
 garson(mod)
 
-}
-
 ## modify the plot using ggplot2 syntax
 library(ggplot2)
 
@@ -116,6 +114,7 @@ garson(mod) +
  scale_fill_gradientn(colours = cols) +
  scale_colour_gradientn(colours = cols)
 }
+}
 \references{
 Garson, G.D. 1991. Interpreting neural network connection weights. Artificial Intelligence
 Expert. 6(4):46-51.
diff --git a/man/lekprofile.Rd b/man/lekprofile.Rd
index 30b8e3e..8bde47e 100644
--- a/man/lekprofile.Rd
+++ b/man/lekprofile.Rd
@@ -70,6 +70,7 @@ mod <- nnet(Y1 ~ X1 + X2 + X3, data = neuraldat, size = 5)
 
 lekprofile(mod)
 
+\dontrun{
 ## using RSNNS, no bias layers
 
 library(RSNNS)
@@ -98,15 +99,12 @@ lekprofile(mod)
 
 ## using caret
 
-\dontrun{
 library(caret)
 
 mod <- train(Y1 ~ X1 + X2 + X3, method = 'nnet', data = neuraldat, linout = TRUE)
 
 lekprofile(mod)
 
-}
-
 ## group by clusters instead of sequencing by quantiles
 
 mod <- nnet(Y1 ~ X1 + X2 + X3, data = neuraldat, size = 5)
@@ -121,6 +119,7 @@ names(group_vals) <- names(group_vals)
 
 lekprofile(mod, group_vals = group_vals, xsel = 'X3')
 }
+}
 \references{
 Lek, S., Delacoste, M., Baran, P., Dimopoulos, I., Lauga, J., Aulagnier, S. 1996. Application of
 neural networks to modelling nonlinear relationships in Ecology. Ecological Modelling. 90:39-52.
diff --git a/man/neuralweights.Rd b/man/neuralweights.Rd
index 633a7a0..ffa423f 100644
--- a/man/neuralweights.Rd
+++ b/man/neuralweights.Rd
@@ -57,6 +57,7 @@ mod <- nnet(Y1 ~ X1 + X2 + X3, data = neuraldat, size = 5, linout = TRUE)
 
 neuralweights(mod)
 
+\dontrun{
 ## using RSNNS, no bias layers
 
 library(RSNNS)
@@ -67,7 +68,6 @@ mod <- mlp(x, y, size = 5, linOut = TRUE)
 
 neuralweights(mod)
 
-\dontrun{
 # pruned model using code from RSSNS pruning demo
 pruneFuncParams <- list(max_pr_error_increase = 10.0, pr_accepted_error = 1.0,
 no_of_pr_retrain_cycles = 1000, min_error_to_stop = 0.01, init_matrix_value = 1e-6,
@@ -77,8 +77,6 @@ mod <- mlp(x, y, size = 5, pruneFunc = "OptimalBrainSurgeon",
 
 neuralweights(mod)
 
-}
-
 ## using neuralnet
 
 library(neuralnet)
@@ -87,4 +85,5 @@ mod <- neuralnet(Y1 ~ X1 + X2 + X3, data = neuraldat, hidden = 5)
 
 neuralweights(mod)
 }
+}
 
diff --git a/man/olden.Rd b/man/olden.Rd
index 5b9324c..0303da0 100644
--- a/man/olden.Rd
+++ b/man/olden.Rd
@@ -78,6 +78,7 @@ mod <- nnet(Y1 ~ X1 + X2 + X3, data = neuraldat, size = 5)
 
 olden(mod)
 
+\dontrun{
 ## View the difference for a model w/ skip layers
 
 set.seed(123)
@@ -106,15 +107,12 @@ olden(mod)
 
 ## using caret
 
-\dontrun{
 library(caret)
 
 mod <- train(Y1 ~ X1 + X2 + X3, method = 'nnet', data = neuraldat, linout = TRUE)
 
 olden(mod)
 
-}
-
 ## multiple hidden layers
 
 x <- neuraldat[, c('X1', 'X2', 'X3')]
@@ -123,6 +121,7 @@ mod <- mlp(x, y, size = c(5, 7, 6), linOut = TRUE)
 
 olden(mod)
 }
+}
 \references{
 Garson, G.D. 1991. Interpreting neural network connection weights. Artificial Intelligence
 Expert. 6(4):46-51.
diff --git a/man/plotnet.Rd b/man/plotnet.Rd
index c87e229..da5057d 100644
--- a/man/plotnet.Rd
+++ b/man/plotnet.Rd
@@ -119,6 +119,7 @@ mod <- nnet(Y1 ~ X1 + X2 + X3, data = neuraldat, size = 5, skip = TRUE)
 
 plotnet(mod, skip = TRUE)
 
+\dontrun{
 ## using RSNNS, no bias layers
 
 library(RSNNS)
@@ -129,7 +130,6 @@ mod <- mlp(x, y, size = 5)
 
 plotnet(mod)
 
-\dontrun{
 # pruned model using code from RSSNS pruning demo
 pruneFuncParams <- list(max_pr_error_increase = 10.0, pr_accepted_error = 1.0,
 no_of_pr_retrain_cycles = 1000, min_error_to_stop = 0.01, init_matrix_value = 1e-6,
@@ -139,7 +139,6 @@ mod <- mlp(x, y, size = 5, pruneFunc = "OptimalBrainSurgeon",
 
 plotnet(mod)
 plotnet(mod, prune_col = 'lightblue')
-}
 
 ## using neuralnet
 
@@ -151,13 +150,11 @@ plotnet(mod)
 
 ## using caret
 
-\dontrun{
 library(caret)
 
 mod <- train(Y1 ~ X1 + X2 + X3, method = 'nnet', data = neuraldat, linout = TRUE)
 
 plotnet(mod)
-}
 
 ## a more complicated network with categorical response
 AND <- c(rep(0, 7), 1)
@@ -188,6 +185,7 @@ cols <- colorRampPalette(c('lightgreen', 'darkgreen'))(3)[rank(rel_imp)]
 
 plotnet(mod, circle_col = list(cols, 'lightblue'))
 }
+}
 \references{
 Ozesmi, S.L., Ozesmi, U. 1999. An artificial neural network approach to spatial habitat modeling with interspecific interaction. Ecological Modelling. 116:15-31.
 }
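
The same change is applied in every file above: the quick nnet() example stays outside so R CMD check still runs it, while the slower RSNNS and caret examples are wrapped in \dontrun{} and therefore skipped by the checks, which is what brings the example timings under the five seconds mentioned in cran-comments.md. A minimal sketch of the resulting roxygen layout, reusing the garson() lines from the hunks above for illustration only (not part of the patch):

#' @examples
#' ## fast example, run by R CMD check
#' mod <- nnet(Y1 ~ X1 + X2 + X3, data = neuraldat, size = 5)
#' garson(mod)
#'
#' \dontrun{
#' ## slower examples, skipped by R CMD check
#' library(caret)
#' mod <- train(Y1 ~ X1 + X2 + X3, method = 'nnet', data = neuraldat, linout = TRUE)
#' garson(mod)
#' }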