add new script

This commit is contained in:
Kyle Belanger 2023-03-17 16:13:34 -04:00
parent 4240141fbd
commit add4d4a9f6
2 changed files with 196 additions and 45 deletions

ML/2-modeling-reg.R (new file, 133 lines)

@ -0,0 +1,133 @@
# The following script is for training and testing Regression models
rm(list = ls(all.names = TRUE)) # Clear the memory of variables from previous run.
cat("\014") # Clear the console
# load packages -----------------------------------------------------------
box::use(
magrittr[`%>%`]
,here[here]
,readr
,gp2 = ggplot2[ggplot, aes]
,rsample
,r = recipes
,wf = workflows
,p = parsnip[tune]
,ys = yardstick
,d = dials
,rsamp = rsample
,tune
)
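# box aliases above: r$... = recipes, p$... = parsnip, wf$... = workflows,
# ys$... = yardstick, d$... = dials; `%>%`, here(), ggplot(), aes(), and tune()
# are attached directly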
# globals -----------------------------------------------------------------
set.seed(070823) #set seed for reproducible research
# load-data ---------------------------------------------------------------
model_data <- readr$read_rds(here("ML","data-unshared","model_data.RDS"))
# split data --------------------------------------------------------------
model_data_split <- rsample$initial_split(
model_data
,prop = 0.80
,strata = ft4_dia
)
ds_train <- rsample$training(model_data_split)
ds_test <- rsample$testing(model_data_split)
# verify distribution of data
table(ds_train$ft4_dia) %>% prop.table()
table(ds_test$ft4_dia) %>% prop.table()
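# optional sanity check (illustrative, not required by the pipeline): the split is
# stratified on ft4_dia, so the continuous target FT4 should also look comparable
summary(ds_train$FT4)
summary(ds_test$FT4)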
ds_train <- ds_train %>% dplyr::select(-ft4_dia)
ds_test <- ds_test %>% dplyr::select(-ft4_dia)
data_folds <- rsamp$vfold_cv(ds_train, repeats = 5)
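# (default v = 10, so repeats = 5 gives 50 resamples per candidate model)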
# recipes ------------------------------------------------------------------
# Neural Net, KNN
normalized_rec <- r$recipe(FT4 ~ ., data = ds_train) %>%
r$update_role(subject_id, new_role = "id") %>%
r$update_role(charttime, new_role = "time") %>%
r$step_impute_bag(r$all_predictors()) %>%
r$step_BoxCox(r$all_numeric()) %>%
r$step_corr(r$all_numeric_predictors()) %>%
r$step_normalize(r$all_numeric())
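# illustrative (not part of the tuning flow): the preprocessing can be sanity-checked
# by prepping the recipe on the training data and baking it back, e.g.
# normalized_rec %>% r$prep() %>% r$bake(new_data = NULL) %>% tibble::glimpse()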
# Random Forest and Boosted Tree
rf_rec <- r$recipe(FT4 ~ . , data = ds_train) %>%
r$update_role(subject_id, new_role = "id") %>%
r$update_role(charttime, new_role = "time") %>%
r$step_impute_bag(r$all_predictors())
# models ------------------------------------------------------------------
nnet_spec <-
p$mlp(hidden_units = tune(), penalty = tune(), epochs = tune()) %>%
p$set_engine("nnet", MaxNWts = 2600) %>%
p$set_mode("regression")
knn_spec <-
p$nearest_neighbor(neighbors = tune(), dist_power = tune(), weight_func = tune()) %>%
p$set_engine("kknn") %>%
p$set_mode("regression")
rf_spec <-
p$rand_forest(mtry = tune(), min_n = tune(), trees = 1000) %>%
p$set_engine("ranger") %>%
p$set_mode("regression")
xgb_spec <-
p$boost_tree(tree_depth = tune(), learn_rate = tune(), loss_reduction = tune(),
min_n = tune(), sample_size = tune(), trees = tune()) %>%
p$set_engine("xgboost") %>%
p$set_mode("regression")
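# widen the neural net's hidden_units search range below (the dials default tops out at 10)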
nnet_param <-
nnet_spec %>%
tune$extract_parameter_set_dials() %>%
update(hidden_units = d$hidden_units(c(1, 27)))
# workflows ---------------------------------------------------------------
normalized <-
workflowsets::workflow_set(
preproc = list(normalized = normalized_rec),
models = list(KNN = knn_spec, neural_network = nnet_spec)
) %>%
workflowsets::option_add(param_info = nnet_param, id = "normalized_neural_network")
forests <-
workflowsets::workflow_set(
preproc = list(forests = rf_rec),
models = list(RF = rf_spec, boosting = xgb_spec)
)
all_workflows <-
dplyr::bind_rows(normalized, forests) %>%
dplyr::mutate(wflow_id = gsub("(forests_)|(normalized_)", "", wflow_id))
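
# tune workflows (illustrative sketch) -------------------------------------
# The rest of the file is not shown in this excerpt. A workflow set like
# all_workflows is typically tuned over the resamples and compared roughly as
# below; the grid size, metric set, and object names here are assumptions, not
# the file's actual code.
#
# reg_metrics <- ys$metric_set(ys$rmse, ys$rsq, ys$mae)
#
# tune_results <- all_workflows %>%
#   workflowsets::workflow_map(
#     "tune_grid"
#     ,resamples = data_folds
#     ,grid = 20
#     ,metrics = reg_metrics
#     ,verbose = TRUE
#   )
#
# workflowsets::rank_results(tune_results, rank_metric = "rmse", select_best = TRUE)
# workflowsets::autoplot(tune_results, metric = "rmse")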


@ -121,48 +121,66 @@ final_conf_rf <- ys$conf_mat(final_rf_predict, ft4_dia, .pred_class)
# random forest regression ------------------------------------------------
#
# reg_metrics <- ys$metric_set(ys$rmse, ys$rsq, ys$mae)
#
# rf_base_reg_model <- p$rand_forest() %>%
# p$set_engine("ranger") %>% p$set_mode("regression")
#
# rf_reg_recipe <- r$recipe(FT4 ~ . , data = reg_train) %>%
# r$update_role(subject_id, new_role = "id") %>%
# r$update_role(charttime, new_role = "time") %>%
# r$step_impute_bag(r$all_predictors())
#
#
# rf_reg_workflow <- wf$workflow() %>%
# wf$add_model(rf_base_reg_model) %>%
# wf$add_recipe(rf_reg_recipe)
#
# rf_base_reg_fit <- p$fit(rf_reg_workflow, reg_train)
#
# rf_reg_predict <- reg_train %>%
# dplyr::select(FT4) %>%
# dplyr::bind_cols(
# predict(rf_base_reg_fit, reg_train)
# )
#
# reg_metrics(rf_reg_predict, truth = FT4, estimate = .pred)
#
# rf_reg_tune_model <- p$rand_forest(trees = tune(), mtry = tune(), min_n = tune()) %>%
# p$set_engine("ranger") %>% p$set_mode("regression")
#
# rf_reg_pred <- dplyr::select(reg_train, -FT4, -subject_id, -charttime)
#
# rf_reg_param <- p$extract_parameter_set_dials(rf_reg_tune_model) %>%
# update(mtry = d$finalize(d$mtry(), rf_reg_pred))
#
# data_fold_reg <- rsamp$vfold_cv(reg_train, v = 5)
#
# rf_reg_workflow <- wf$update_model(rf_reg_workflow, rf_reg_tune_model)
#
# # takes around 1 hr to run grid search. saving best params manually
# # rf_reg_tune <- rf_reg_workflow %>%
# # tune::tune_grid(
# # data_fold_reg
# # ,grid = rf_reg_param %>% d$grid_regular()
# # )
#
# rf_reg_best_params <- tibble::tibble(
# mtry = 8
# ,trees = 1000
# ,min_n = 2
# )
#
# final_rf_reg_workflow <- rf_reg_workflow %>%
# tune::finalize_workflow(rf_reg_best_params)
#
# final_rf_reg_fit <- p$fit(final_rf_reg_workflow, reg_train)
#
# final_rf_reg_predict <- reg_train %>%
# dplyr::select(FT4) %>%
# dplyr::bind_cols(
# predict(final_rf_reg_fit, reg_train)
# )
#
# reg_metrics(final_rf_reg_predict, truth = FT4, estimate = .pred)
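#
# Illustrative sketch (assumes a held-out split named reg_test, which is not
# shown above): the commented-out metrics are computed on reg_train, so they
# measure training fit only; out-of-sample performance for the final model
# would be checked the same way on the test data.
#
# final_rf_reg_test_predict <- reg_test %>%
#   dplyr::select(FT4) %>%
#   dplyr::bind_cols(
#     predict(final_rf_reg_fit, reg_test)
#   )
#
# reg_metrics(final_rf_reg_test_predict, truth = FT4, estimate = .pred)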