library(mikropml)
library(dplyr)
library(ggplot2) # for ylab(), ggsave(), and the plotting sketches below
# get cumulative performance for a single model
sensspec_1 <- calc_model_sensspec(
  otu_mini_bin_results_glmnet$trained_model,
  otu_mini_bin_results_glmnet$test_data,
  "dx"
)
head(sensspec_1)
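# A quick sketch (not part of the package API): plot the single-model ROC
# curve directly with ggplot2, assuming sensspec_1 holds `sensitivity` and
# `specificity` columns, as the aggregation steps below imply.
ggplot(sensspec_1, aes(x = 1 - specificity, y = sensitivity)) +
  geom_line() +
  labs(x = "1 - Specificity", y = "Sensitivity")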
# get performance for multiple models
get_sensspec_seed <- function(seed) {
  ml_result <- run_ml(otu_mini_bin, "glmnet", seed = seed)
  sensspec <- calc_model_sensspec(
    ml_result$trained_model,
    ml_result$test_data,
    "dx"
  ) %>%
    dplyr::mutate(seed = seed)
  return(sensspec)
}
sensspec_dat <- purrr::map_dfr(seq(100, 102), get_sensspec_seed)
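# Optional sketch: with many seeds, the same loop parallelizes with furrr
# (assumes the furrr and future packages are installed; future_map_dfr
# mirrors purrr::map_dfr).
# future::plan(future::multisession)
# sensspec_dat <- furrr::future_map_dfr(seq(100, 102), get_sensspec_seed)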
# calculate mean sensitivity over specificity
roc_dat <- calc_mean_roc(sensspec_dat)
head(roc_dat)
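# A rough sketch: approximate the mean AUROC from roc_dat by the trapezoidal
# rule, assuming roc_dat has `specificity` and `mean_sensitivity` columns
# (the naming that calc_mean_perf() implies for sum_var = sensitivity).
roc_dat %>%
  dplyr::arrange(dplyr::desc(specificity)) %>%
  dplyr::summarize(
    auroc = sum(diff(1 - specificity) *
      (utils::head(mean_sensitivity, -1) + utils::tail(mean_sensitivity, -1)) / 2)
  )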
# calculate mean precision over recall
prc_dat <- calc_mean_prc(sensspec_dat)
head(prc_dat)
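# The same trapezoidal sketch gives an approximate mean AUPRC, assuming
# prc_dat has `recall` and `mean_precision` columns (the naming the
# balanced-precision block below implies).
prc_dat %>%
  dplyr::arrange(recall) %>%
  dplyr::summarize(
    auprc = sum(diff(recall) *
      (utils::head(mean_precision, -1) + utils::tail(mean_precision, -1)) / 2)
  )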
# plot ROC & PRC
roc_dat %>% plot_mean_roc()
baseline_prec <- calc_baseline_precision(otu_mini_bin, "dx", "cancer")
prc_dat %>%
  plot_mean_prc(baseline_precision = baseline_prec)
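# Sketch: plot_mean_roc() and plot_mean_prc() return ggplot objects (as the
# `+ ylab()` call below shows), so they can be customized and saved with
# standard ggplot2 tooling. The file name here is just an example.
roc_plot <- plot_mean_roc(roc_dat) + ggtitle("Mean ROC across seeds")
ggsave("mean_roc.png", roc_plot, width = 5, height = 4)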
# balanced precision: precision recalibrated to what it would be if the
# classes were balanced (i.e. a prior of 0.5)
prior <- calc_baseline_precision(otu_mini_bin,
  outcome_colname = "dx",
  pos_outcome = "cancer"
)
bprc_dat <- sensspec_dat %>%
  dplyr::mutate(balanced_precision = calc_balanced_precision(precision, prior)) %>%
  dplyr::rename(recall = sensitivity) %>%
  calc_mean_perf(group_var = recall, sum_var = balanced_precision)
bprc_dat %>%
  plot_mean_prc(ycol = mean_balanced_precision) +
  ylab("Mean Bal. Precision")
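# Sketch: with balanced precision the no-skill baseline is 0.5 by
# construction (calc_balanced_precision() maps the class prior to 0.5), so a
# dashed reference line can replace the class-prior baseline.
bprc_dat %>%
  plot_mean_prc(ycol = mean_balanced_precision) +
  ylab("Mean Bal. Precision") +
  geom_hline(yintercept = 0.5, linetype = "dashed")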