## Not run:
#
# ##generate expression feature matrix
# sampleVec1 <- c(1, 2, 3, 4, 5, 6)
# sampleVec2 <- c(1, 2, 3, 4, 5, 6)
# featureMat <- expFeatureMatrix( expMat1 = ControlExpMat, sampleVec1 = sampleVec1,
# expMat2 = SaltExpMat, sampleVec2 = sampleVec2,
# logTransformed = TRUE, base = 2,
# features = c("zscore", "foldchange", "cv", "expression"))
#
# ##positive samples
# positiveSamples <- as.character(sampleData$KnownSaltGenes)
# ##unlabeled samples
# unlabelSamples <- setdiff( rownames(featureMat), positiveSamples )
# idx <- sample(length(unlabelSamples))
# ##randomly select 1000 unlabeled samples as negative samples
# ##(assumes length(unlabelSamples) >= 1000; fewer would produce NA indices)
# negativeSamples <- unlabelSamples[idx[1:1000]]
#
# ##random forest, using five-fold cross-validation to obtain optimal parameters
# cl <- classifier( method = "randomForest",
# featureMat = featureMat,
# positiveSamples = positiveSamples,
# negativeSamples = negativeSamples,
# tunecontrol = tune.control(sampling = "cross", cross = 5),
# ntree = 100 ) #build 100 trees for the forest
#
#
# ##svm and using five-fold cross validation for obtaining optimal parameters
# cl <- classifier( method = "svm", featureMat = featureMat,
# positiveSamples = positiveSamples,
# negativeSamples = negativeSamples,
# tunecontrol = tune.control(sampling = "cross", cross = 5),
# kernel = "radial",
# probability = TRUE,
# ranges = list(gamma = 2^(-2:2),
# cost = 2^(-4:4)) ) #radial kernel, with a grid search over this parameter space
#
# ##neural network, using a single training/validation split for tuning
# cl <- classifier( method = "nnet", featureMat = featureMat,
# positiveSamples = positiveSamples,
# negativeSamples = negativeSamples,
# tunecontrol = tune.control(sampling = "fix"),
# trace = TRUE, size = 10 ) #nnet-specific tuning parameters
#
#
# ## End(Not run)
Run the code above in your browser using DataLab