## Not run: ------------------------------------
# # Arguments are not aligned, in order to keep each line under 100 characters
# mega_model <- LauraeML(data = data,
# label = targets,
# folds = list(1:1460, 1461:2919),
# seed = 0,
# models = list(lgb = LauraeML_lgbreg,
# xgb = LauraeML_gblinear),
# parallelized = FALSE,
# optimize = TRUE,
# no_train = FALSE,
# logging = NULL,
# maximize = FALSE, # FALSE on RMSE, fast example of doing the worst
# features = 0.50,
# hyperparams = list(lgb = list(Mean = c(5, 5, 1, 0.7, 0.7, 0.5, 0.5),
# Sd = c(3, 3, 1, 0.2, 0.2, 0.5, 0.5),
# Min = c(1, 1, 0, 0.1, 0.1, 0, 0),
# Max = c(15, 50, 50, 1, 1, 50, 50)),
# xgb = list(Mean = c(1, 1, 1),
# Sd = c(1, 1, 1),
# Min = c(0, 0, 0),
# Max = c(2, 2, 2))),
# n_tries = 10, # Set this big, preferably 10 * number of features
# n_iters = 1, # Set this big to like 50
# early_stop = 2,
# elites = 0.4,
# feature_smoothing = 1,
# converge_cont = 0.5,
# converge_disc = 0.25)
## ---------------------------------------------
# Run the code above in your browser using DataLab