#--------------------------------------------------------------------------
# Example: Diabetes
#
# See Efron B., Hastie T., Johnstone I., and Tibshirani R.
# Least angle regression. Ann. Statist., 32:407-499, 2004.
data(diabetes, package = "l2boost")
l2.object <- l2boost(diabetes$x, diabetes$y, M=1000, nu=0.01)
# Plot the gradient-correlation and the regression beta coefficients as a
# function of the boosting step m
par(mfrow=c(2,2))
plot(l2.object)
abline(v=500, lty=2, col="grey")
plot(l2.object, type="coef")
abline(v=500, lty=2, col="grey")
# Limit the plots to the first 500 steps of the algorithm
# (the grey vertical line in the previous plots).
plot(l2.object, xlim=c(0,500))
plot(l2.object, type="coef", xlim=c(0,500))
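# The coefficient estimates at a given stopping iteration can also be
# extracted directly. A minimal sketch, assuming the coef() accessor with
# argument m behaves as in the cross-validation example below:
beta.500 <- coef(l2.object, m=500)
head(beta.500)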
#--------------------------------------------------------------------------
# Example: Plotting cross-validation objects
dta <- elasticNetSim(n=100)
# Set the boosting parameters
Mtarget <- 1000
nuTarget <- 1e-2
cv.l2 <- cv.l2boost(dta$x, dta$y, M=Mtarget, nu=nuTarget, lambda=NULL)
# Show the CV MSE plot, with a marker at the "optimal iteration"
plot(cv.l2)
abline(v=cv.l2$opt.step, lty=2, col="grey")
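# The cross-validation object can also be inspected directly; a minimal
# sketch using base R only. opt.step holds the stopping iteration at the
# CV-minimum MSE used for the marker above.
names(cv.l2)
cv.l2$opt.step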
# Show the l2boost object plots.
plot(cv.l2$fit)
abline(v=cv.l2$opt.step, lty=2, col="grey")
plot(cv.l2$fit, type="coef")
abline(v=cv.l2$opt.step, lty=2, col="grey")
# Create a color vector of length p=40 (from elasticNetSim defaults)
clr <- rep("black", 40)
# Color the coordinates selected along the boosting path red.
clr[unique(cv.l2$fit$l.crit)] <- "red"
# Show the coefficient values at the "optimal" stopping iteration;
# red points mark coordinates selected by the boosting algorithm.
plot(coef(cv.l2$fit, m=cv.l2$opt.step), col=clr, ylab=expression(beta))
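# A minimal sketch comparing the coordinates entered along the boosting path
# with the nonzero coefficients at the optimal step. This assumes l.crit
# records the coordinate updated at each step (as in the coloring step above)
# and that unselected coefficients remain exactly zero.
length(unique(cv.l2$fit$l.crit))
sum(coef(cv.l2$fit, m=cv.l2$opt.step) != 0)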