## standard method to create feature weights automatically during training
## model <- kbsvm( .... , featureWeights="yes", .....)

## this example describes the case where feature weights were not created
## during training but should be added later to the model

## load example sequences and select a small set of sequences
## to speed up training for demonstration purpose
data(TFBS)

## create sample indices of training and test subset
## (seq_along is safe even for zero-length input, unlike 1:length(...))
train <- sample(seq_along(yFB), 200)
test <- seq_along(yFB)[-train]

## determine all labels
allLabels <- unique(yFB)

## create a kernel object
gappyK1M4 <- gappyPairKernel(k=1, m=4)

## model is trained with creation of feature weights
model <- kbsvm(enhancerFB[train], yFB[train], gappyK1M4,
               pkg="LiblineaR", svm="C-svc", cost=20)

## feature weights included in model
featureWeights(model)

## Not run:
# ## model is originally trained without creation of feature weights
# model <- kbsvm(enhancerFB[train], yFB[train], gappyK1M4,
#                pkg="LiblineaR", svm="C-svc", cost=20, featureWeights="no")
#
# ## no feature weights included in model
# featureWeights(model)
#
# ## later after training add feature weights and model offset of model to
# ## KeBABS model
# featureWeights(model) <- getFeatureWeights(model)
# modelOffset(model) <- getSVMSlotValue("b", model)
#
# ## show a part of the feature weights and the model offset
# featureWeights(model)[1:7]
# modelOffset(model)
#
# ## another scenario for getFeatureWeights is to test the performance
# ## behavior of different prunings of the feature weights
#
# ## show histogram of full feature weights
# hist(featureWeights(model), breaks=30)
#
# ## show number of features
# length(featureWeights(model))
#
# ## first predict with full feature weights to see how performance
# ## changes through pruning; when feature weights are included in the
# ## model, prediction is always performed with the feature weights
# pred <- predict(model, enhancerFB[test])
# evaluatePrediction(pred, yFB[test], allLabels=allLabels)
#
# ## add feature weights with pruning to absolute values larger than 0.6
# ## model offset was assigned above and is not impacted by pruning
# featureWeights(model) <- getFeatureWeights(model, weightLimit=0.6)
#
# ## show histogram of pruned feature weights
# hist(featureWeights(model), breaks=30)
#
# ## show reduced number of features
# length(featureWeights(model))
#
# ## now predict with pruned feature weights
# pred <- predict(model, enhancerFB, sel=test)
# evaluatePrediction(pred, yFB[test], allLabels=allLabels)
# ## End(Not run)
## Run the code above in your browser using DataLab