## Not run: ------------------------------------
# # Load libraries
# library(data.table)
# library(Matrix)
# library(xgboost)
#
# # Create data
# data(agaricus.train, package = "lightgbm")
# data(agaricus.test, package = "lightgbm")
# agaricus_data_train <- data.table(as.matrix(agaricus.train$data))
# agaricus_data_test <- data.table(as.matrix(agaricus.test$data))
# agaricus_label_train <- agaricus.train$label
# agaricus_label_test <- agaricus.test$label
# folds <- Laurae::kfold(agaricus_label_train, 5)
#
# # Train a model (binary classification) - FAST VERSION
# model <- MGScanning(data = agaricus_data_train, # Training data
#                     labels = agaricus_label_train, # Training labels
#                     folds = folds, # Folds for cross-validation
#                     dimensions = 1, # Change this to 2 for two-dimensional (matrix) inputs
#                     depth = 10, # Change this to change the sliding window size
#                     stride = 20, # Change this to change the sliding window speed
#                     nthread = 1, # Change this to use more threads
#                     lr = 1, # Do not touch this unless you are an expert
#                     training_start = NULL, # Do not touch this unless you are an expert
#                     validation_start = NULL, # Do not touch this unless you are an expert
#                     n_forest = 2, # Number of forest models
#                     n_trees = 30, # Number of trees per forest
#                     random_forest = 1, # We want only 1 Random Forest
#                     seed = 0,
#                     objective = "binary:logistic",
#                     eval_metric = Laurae::df_logloss,
#                     multi_class = 2, # Modify this for multiclass problems
#                     verbose = TRUE)
#
# # Train a model (binary classification) - SLOW VERSION
# model <- MGScanning(data = agaricus_data_train, # Training data
#                     labels = agaricus_label_train, # Training labels
#                     folds = folds, # Folds for cross-validation
#                     dimensions = 1, # Change this to 2 for two-dimensional (matrix) inputs
#                     depth = 10, # Change this to change the sliding window size
#                     stride = 1, # Change this to change the sliding window speed
#                     nthread = 1, # Change this to use more threads
#                     lr = 1, # Do not touch this unless you are an expert
#                     training_start = NULL, # Do not touch this unless you are an expert
#                     validation_start = NULL, # Do not touch this unless you are an expert
#                     n_forest = 2, # Number of forest models
#                     n_trees = 30, # Number of trees per forest
#                     random_forest = 1, # We want only 1 Random Forest
#                     seed = 0,
#                     objective = "binary:logistic",
#                     eval_metric = Laurae::df_logloss,
#                     multi_class = 2, # Modify this for multiclass problems
#                     verbose = TRUE)
#
# # Retrieve the predictions stored in the trained model
# data_predictions <- model$preds
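#
# # Predictions on the held-out test set (a hedged sketch: this assumes the
# # package's companion MGScanning_pred() predictor accepts the fitted model
# # and new data; check ?MGScanning_pred for the exact argument names)
# test_predictions <- MGScanning_pred(model, data = agaricus_data_test)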
#
# # Example on fake pictures (matrices) and multiclass problem
#
# # Generate fake images
# new_data <- list(matrix(rnorm(n = 400), ncol = 20, nrow = 20),
#                  matrix(rnorm(n = 400), ncol = 20, nrow = 20),
#                  matrix(rnorm(n = 400), ncol = 20, nrow = 20),
#                  matrix(rnorm(n = 400), ncol = 20, nrow = 20),
#                  matrix(rnorm(n = 400), ncol = 20, nrow = 20),
#                  matrix(rnorm(n = 400), ncol = 20, nrow = 20),
#                  matrix(rnorm(n = 400), ncol = 20, nrow = 20),
#                  matrix(rnorm(n = 400), ncol = 20, nrow = 20),
#                  matrix(rnorm(n = 400), ncol = 20, nrow = 20),
#                  matrix(rnorm(n = 400), ncol = 20, nrow = 20))
#
# # Generate fake labels
# new_labels <- c(2, 1, 0, 2, 1, 0, 2, 1, 0, 0)
#
# # Train a model (multiclass problem)
# model <- MGScanning(data = new_data, # Training data
#                     labels = new_labels, # Training labels
#                     folds = list(1:3, 4:6, 7:10), # Non-overlapping folds for cross-validation
#                     dimensions = 2, # Two-dimensional (matrix) inputs
#                     depth = 10, # Sliding window size
#                     stride = 1, # Sliding window speed
#                     nthread = 1, # Change this to use more threads
#                     lr = 1, # Do not touch this unless you are an expert
#                     training_start = NULL, # Do not touch this unless you are an expert
#                     validation_start = NULL, # Do not touch this unless you are an expert
#                     n_forest = 2, # Number of forest models
#                     n_trees = 10, # Number of trees per forest
#                     random_forest = 1, # We want only 1 Random Forest
#                     seed = 0,
#                     objective = "multi:softprob",
#                     eval_metric = Laurae::df_logloss,
#                     multi_class = 3, # Number of classes
#                     verbose = TRUE)
#
# # Matrix output is 10x600
# dim(model$preds)
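#
# # The scanned features are typically fed to a downstream learner, as in the
# # cascade stage of gcForest. A minimal sketch only (not part of MGScanning
# # itself), assuming the standard xgboost::xgb.train() interface and reusing
# # new_labels from above:
# dtrain <- xgboost::xgb.DMatrix(as.matrix(model$preds), label = new_labels)
# downstream <- xgboost::xgb.train(params = list(objective = "multi:softprob",
#                                                num_class = 3),
#                                  data = dtrain,
#                                  nrounds = 10)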
## ---------------------------------------------