From add4d4a9f6e663f4898a560ab4323fe171ab2e33 Mon Sep 17 00:00:00 2001 From: Kyle Belanger Date: Fri, 17 Mar 2023 16:13:34 -0400 Subject: [PATCH] add new script --- ML/2-modeling-reg.R | 133 ++++++++++++++++++++++++++++++++++++++++++++ ML/2-modeling.R | 108 ++++++++++++++++++++--------------- 2 files changed, 196 insertions(+), 45 deletions(-) create mode 100644 ML/2-modeling-reg.R diff --git a/ML/2-modeling-reg.R b/ML/2-modeling-reg.R new file mode 100644 index 0000000..e8f2c15 --- /dev/null +++ b/ML/2-modeling-reg.R @@ -0,0 +1,133 @@ +# The following script is for training and testing Regression models +rm(list = ls(all.names = TRUE)) # Clear the memory of variables from previous run. +cat("\014") # Clear the console + + +# load packages ----------------------------------------------------------- + +box::use( + magrittr[`%>%`] + ,here[here] + ,readr + ,gp2 = ggplot2[ggplot, aes] + ,rsample + ,r = recipes + ,wf = workflows + ,p = parsnip[tune] + ,ys = yardstick + ,d = dials + ,rsamp = rsample + ,tune +) + + + +# globals ----------------------------------------------------------------- + +set.seed(070823) #set seed for reproducible research + + +# load-data --------------------------------------------------------------- + +model_data <- readr$read_rds(here("ML","data-unshared","model_data.RDS")) + + +# split data -------------------------------------------------------------- + +model_data_split <- rsample$initial_split( + model_data + ,prop = 0.80 + ,strata = ft4_dia +) + +ds_train <- rsample$training(model_data_split) +ds_test <- rsample$testing(model_data_split) + +# verify distribution of data +table(ds_train$ft4_dia) %>% prop.table() +table(ds_test$ft4_dia) %>% prop.table() + + +ds_train <- ds_train %>% dplyr::select(-ft4_dia) +ds_test <- ds_test %>% dplyr::select(-ft4_dia) + +data_folds <- rsamp$vfold_cv(ds_train, repeats = 5) + + +# recipes ------------------------------------------------------------------ + + +# Neural Net, KNN +normalized_rec <- 
r$recipe(FT4 ~ ., data = ds_train) %>% + r$update_role(subject_id, new_role = "id") %>% + r$update_role(charttime, new_role = "time") %>% + r$step_impute_bag(r$all_predictors()) %>% + r$step_BoxCox(r$all_numeric()) %>% + r$step_corr(r$all_numeric_predictors()) %>% + r$step_normalize(r$all_numeric()) + + +# Random Forest and Boosted Tree +rf_rec <- r$recipe(FT4 ~ . , data = ds_train) %>% + r$update_role(subject_id, new_role = "id") %>% + r$update_role(charttime, new_role = "time") %>% + r$step_impute_bag(r$all_predictors()) + + + + +# models ------------------------------------------------------------------ + + +nnet_spec <- + p$mlp(hidden_units = tune(), penalty = tune(), epochs = tune()) %>% + p$set_engine("nnet", MaxNWts = 2600) %>% + p$set_mode("regression") + + +knn_spec <- + p$nearest_neighbor(neighbors = tune(), dist_power = tune(), weight_func = tune()) %>% + p$set_engine("kknn") %>% + p$set_mode("regression") + + +rf_spec <- + p$rand_forest(mtry = tune(), min_n = tune(), trees = 1000) %>% + p$set_engine("ranger") %>% + p$set_mode("regression") + + +xgb_spec <- + p$boost_tree(tree_depth = tune(), learn_rate = tune(), loss_reduction = tune(), + min_n = tune(), sample_size = tune(), trees = tune()) %>% + p$set_engine("xgboost") %>% + p$set_mode("regression") + + + +nnet_param <- + nnet_spec %>% + tune$extract_parameter_set_dials() %>% + update(hidden_units = d$hidden_units(c(1, 27))) + + + +# workflows --------------------------------------------------------------- + +normalized <- + workflowsets::workflow_set( + preproc = list(normalized = normalized_rec), + models = list(KNN = knn_spec, neural_network = nnet_spec) + ) %>% + workflowsets::option_add(param_info = nnet_param, id = "normalized_neural_network") + +forests <- + workflowsets::workflow_set( + preproc = list(forests = rf_rec), + models = list(RF = rf_spec, boosting = xgb_spec) + ) + + +all_workflows <- + dplyr::bind_rows(normalized, forests) %>% + dplyr::mutate(wflow_id = 
gsub("(forests_)|(normalized_)", "", wflow_id)) diff --git a/ML/2-modeling.R b/ML/2-modeling.R index 675dd7a..7838cad 100644 --- a/ML/2-modeling.R +++ b/ML/2-modeling.R @@ -121,48 +121,66 @@ final_conf_rf <- ys$conf_mat(final_rf_predict, ft4_dia, .pred_class) # random forest regression ------------------------------------------------ - -reg_metrics <- ys$metric_set(ys$rmse, ys$rsq, ys$mae) - -rf_base_reg_model <- p$rand_forest() %>% - p$set_engine("ranger") %>% p$set_mode("regression") - -rf_reg_recipe <- r$recipe(FT4 ~ . , data = reg_train) %>% - r$update_role(subject_id, new_role = "id") %>% - r$update_role(charttime, new_role = "time") %>% - r$step_impute_bag(r$all_predictors()) - - -rf_reg_workflow <- wf$workflow() %>% - wf$add_model(rf_base_reg_model) %>% - wf$add_recipe(rf_reg_recipe) - -rf_base_reg_fit <- p$fit(rf_reg_workflow, reg_train) - -rf_reg_predict <- reg_train %>% - dplyr::select(FT4) %>% - dplyr::bind_cols( - predict(rf_base_reg_fit, reg_train) - ) - -reg_metrics(rf_reg_predict, truth = FT4, estimate = .pred) - -rf_reg_tune_model <- p$rand_forest(trees = tune(), mtry = tune(), min_n = tune()) %>% - p$set_engine("ranger") %>% p$set_mode("regression") - -rf_reg_pred <- dplyr::select(reg_train, -FT4, -subject_id, -charttime) - -rf_reg_param <- p$extract_parameter_set_dials(rf_reg_tune_model) %>% - update(mtry = d$finalize(d$mtry(), rf_reg_pred))z - -data_fold_reg <- rsamp$vfold_cv(reg_train, v = 5) - -# takes around 1 hr to run grid search. saving best params manaually -rf_reg_tune <- rf_reg_workflow %>% - tune::tune_grid( - data_fold - ,grid = rf_reg_param %>% d$grid_regular() - ) - - - +# +# reg_metrics <- ys$metric_set(ys$rmse, ys$rsq, ys$mae) +# +# rf_base_reg_model <- p$rand_forest() %>% +# p$set_engine("ranger") %>% p$set_mode("regression") +# +# rf_reg_recipe <- r$recipe(FT4 ~ . 
, data = reg_train) %>% +# r$update_role(subject_id, new_role = "id") %>% +# r$update_role(charttime, new_role = "time") %>% +# r$step_impute_bag(r$all_predictors()) +# +# +# rf_reg_workflow <- wf$workflow() %>% +# wf$add_model(rf_base_reg_model) %>% +# wf$add_recipe(rf_reg_recipe) +# +# rf_base_reg_fit <- p$fit(rf_reg_workflow, reg_train) +# +# rf_reg_predict <- reg_train %>% +# dplyr::select(FT4) %>% +# dplyr::bind_cols( +# predict(rf_base_reg_fit, reg_train) +# ) +# +# reg_metrics(rf_reg_predict, truth = FT4, estimate = .pred) +# +# rf_reg_tune_model <- p$rand_forest(trees = tune(), mtry = tune(), min_n = tune()) %>% +# p$set_engine("ranger") %>% p$set_mode("regression") +# +# rf_reg_pred <- dplyr::select(reg_train, -FT4, -subject_id, -charttime) +# +# rf_reg_param <- p$extract_parameter_set_dials(rf_reg_tune_model) %>% +# update(mtry = d$finalize(d$mtry(), rf_reg_pred)) +# +# data_fold_reg <- rsamp$vfold_cv(reg_train, v = 5) +# +# rf_reg_workflow <- wf$update_model(rf_reg_workflow, rf_reg_tune_model) +# +# # takes around 1 hr to run grid search. saving best params manually +# # rf_reg_tune <- rf_reg_workflow %>% +# # tune::tune_grid( +# # data_fold_reg +# # ,grid = rf_reg_param %>% d$grid_regular() +# # ) +# +# rf_reg_best_params <- tibble::tibble( +# mtry = 8 +# ,trees = 1000 +# ,min_n = 2 +# ) +# +# final_rf_reg_workflow <- rf_reg_workflow %>% +# tune::finalize_workflow(rf_reg_best_params) +# +# final_rf_reg_fit <- p$fit(final_rf_reg_workflow, reg_train) +# +# final_rf_reg_predict <- reg_train %>% +# dplyr::select(FT4) %>% +# dplyr::bind_cols( +# predict(final_rf_reg_fit, reg_train) +# ) +# +# reg_metrics(final_rf_reg_predict, truth = FT4, estimate = .pred)