From 8504a3cc7b87142ebabef93939a8812883e14725 Mon Sep 17 00:00:00 2001 From: bjaeger Date: Wed, 11 Oct 2023 20:36:14 -0400 Subject: [PATCH] cran submission 1, v0.1.0 --- CRAN-SUBMISSION | 6 +++--- R/roxy.R | 2 +- man/orsf_vi.Rd | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/CRAN-SUBMISSION b/CRAN-SUBMISSION index 167a2fb6..e50ee393 100644 --- a/CRAN-SUBMISSION +++ b/CRAN-SUBMISSION @@ -1,3 +1,3 @@ -Version: 0.0.7 -Date: 2023-01-11 17:16:51 UTC -SHA: 4f4711f038834c4ef7b97a1056964939f590d8b1 +Version: 0.1.0 +Date: 2023-10-12 00:32:23 UTC +SHA: 65cf27efc6b936f9a599c679fd833a142c6d6a4d diff --git a/R/roxy.R b/R/roxy.R index 40844894..a96ebd4c 100644 --- a/R/roxy.R +++ b/R/roxy.R @@ -271,7 +271,7 @@ roxy_dots <- function(){ roxy_vi_describe <- function(type){ switch(type, - 'negate' = "Each variable is assessed separately by multiplying the variable's coefficients by -1 and then determining how much the model's performance changes. The worse the model's performance after negating coefficients for a given variable, the more important the variable. This technique is promising b/c it does not require permutation and it emphasizes variables with larger coefficients in linear combinations, but it is also relatively new and hasn't been studied as much as permutation importance. See [Jaeger, 2023](https://doi.org/10.1080/10618600.2023.2231048) for more details on this technique.", + 'negate' = "Each variable is assessed separately by multiplying the variable's coefficients by -1 and then determining how much the model's performance changes. The worse the model's performance after negating coefficients for a given variable, the more important the variable. This technique is promising b/c it does not require permutation and it emphasizes variables with larger coefficients in linear combinations, but it is also relatively new and hasn't been studied as much as permutation importance. 
See Jaeger (2023) for more details on this technique.", 'permute' = "Each variable is assessed separately by randomly permuting the variable's values and then determining how much the model's performance changes. The worse the model's performance after permuting the values of a given variable, the more important the variable. This technique is flexible, intuitive, and frequently used. It also has several [known limitations](https://christophm.github.io/interpretable-ml-book/feature-importance.html#disadvantages-9)", 'anova' = "A p-value is computed for each coefficient in each linear combination of variables in each decision tree. Importance for an individual predictor variable is the proportion of times a p-value for its coefficient is < 0.01. This technique is very efficient computationally, but may not be as effective as permutation or negation in terms of selecting signal over noise variables. See [Menze, 2011](https://link.springer.com/chapter/10.1007/978-3-642-23783-6_29) for more details on this technique.") diff --git a/man/orsf_vi.Rd b/man/orsf_vi.Rd index 9d27d46c..a6376b7f 100644 --- a/man/orsf_vi.Rd +++ b/man/orsf_vi.Rd @@ -109,7 +109,7 @@ or not (see examples). \section{Variable importance methods}{ -\strong{negation importance}: Each variable is assessed separately by multiplying the variable's coefficients by -1 and then determining how much the model's performance changes. The worse the model's performance after negating coefficients for a given variable, the more important the variable. This technique is promising b/c it does not require permutation and it emphasizes variables with larger coefficients in linear combinations, but it is also relatively new and hasn't been studied as much as permutation importance. See \href{https://doi.org/10.1080/10618600.2023.2231048}{Jaeger, 2023} for more details on this technique. 
+\strong{negation importance}: Each variable is assessed separately by multiplying the variable's coefficients by -1 and then determining how much the model's performance changes. The worse the model's performance after negating coefficients for a given variable, the more important the variable. This technique is promising b/c it does not require permutation and it emphasizes variables with larger coefficients in linear combinations, but it is also relatively new and hasn't been studied as much as permutation importance. See Jaeger (2023) for more details on this technique. \strong{permutation importance}: Each variable is assessed separately by randomly permuting the variable's values and then determining how much the model's performance changes. The worse the model's performance after permuting the values of a given variable, the more important the variable. This technique is flexible, intuitive, and frequently used. It also has several \href{https://christophm.github.io/interpretable-ml-book/feature-importance.html#disadvantages-9}{known limitations}