# NOT RUN {
library(picasso)
################################################################
## Sparse linear regression
## Generate the design matrix and regression coefficient vector
n <- 100    # sample size
d <- 80     # number of features (dimension)
rho <- 0.5  # parameter controlling correlation between columns of X
            # (renamed from `c`, which masked base::c())
s <- 20     # support size (number of nonzero coefficients)
set.seed(2016)
## Adding a shared Gaussian factor rho*rnorm(n) induces correlation between
## columns; scale() standardizes, then each column is rescaled to norm sqrt(n)
X <- scale(matrix(rnorm(n * d), n, d) + rho * rnorm(n)) / sqrt(n - 1) * sqrt(n)
## True coefficients: s nonzero entries, the rest exactly zero
beta <- c(runif(s), rep(0, d - s))
## Generate response using Gaussian noise, and fit sparse linear models
noise <- rnorm(n)
Y <- X %*% beta + noise
## l1 regularization solved with naive update
fitted.l1.naive <- picasso(X, Y, nlambda = 100, type.gaussian = "naive")
## l1 regularization solved with covariance update
fitted.l1.covariance <- picasso(X, Y, nlambda = 100, type.gaussian = "covariance")
## mcp regularization
fitted.mcp <- picasso(X, Y, nlambda = 100, method = "mcp")
## scad regularization
fitted.scad <- picasso(X, Y, nlambda = 100, method = "scad")
## lambdas used along the regularization path
print(fitted.l1.naive$lambda)
## number of nonzero coefficients for each lambda
print(fitted.l1.naive$df)
## coefficients and intercept for the i-th lambda
i <- 30
print(fitted.l1.naive$lambda[i])
print(fitted.l1.naive$beta[, i])
print(fitted.l1.naive$intercept[i])
## Visualize the solution paths
plot(fitted.l1.naive)
plot(fitted.l1.covariance)
plot(fitted.mcp)
plot(fitted.scad)
################################################################
## Sparse logistic regression
## Generate the design matrix and regression coefficient vector
n <- 100    # sample size
d <- 80     # number of features (dimension)
rho <- 0.5  # parameter controlling correlation between columns of X
            # (renamed from `c`, which masked base::c())
s <- 20     # support size (number of nonzero coefficients)
set.seed(2016)
X <- scale(matrix(rnorm(n * d), n, d) + rho * rnorm(n)) / sqrt(n - 1) * sqrt(n)
beta <- c(runif(s), rep(0, d - s))
## Generate response and fit sparse logistic models
p <- 1 / (1 + exp(-X %*% beta))  # Bernoulli success probabilities (inverse logit)
Y <- rbinom(n, rep(1, n), p)     # binary 0/1 response
## l1 regularization
fitted.l1 <- picasso(X, Y, nlambda = 100, family = "binomial", method = "l1")
## mcp regularization
fitted.mcp <- picasso(X, Y, nlambda = 100, family = "binomial", method = "mcp")
## scad regularization
fitted.scad <- picasso(X, Y, nlambda = 100, family = "binomial", method = "scad")
## lambdas used along the regularization path
print(fitted.l1$lambda)
## number of nonzero coefficients for each lambda
print(fitted.l1$df)
## coefficients and intercept for the i-th lambda
i <- 30
print(fitted.l1$lambda[i])
print(fitted.l1$beta[, i])
print(fitted.l1$intercept[i])
## Visualize the solution path
plot(fitted.l1)
## Estimate of Bernoulli parameters
param.l1 <- fitted.l1$p
################################################################
## Sparse poisson regression
## Generate the design matrix and regression coefficient vector
n <- 100    # sample size
d <- 80     # number of features (dimension)
rho <- 0.5  # parameter controlling correlation between columns of X
            # (renamed from `c`, which masked base::c())
s <- 20     # support size (number of nonzero coefficients)
set.seed(2016)
X <- scale(matrix(rnorm(n * d), n, d) + rho * rnorm(n)) / sqrt(n - 1) * sqrt(n)
## Coefficients shrunk by 1/sqrt(s) to keep the Poisson means exp(X beta) moderate
beta <- c(runif(s), rep(0, d - s)) / sqrt(s)
## Generate response and fit sparse poisson models
p <- X %*% beta + rnorm(n)  # noisy linear predictor (log of the Poisson mean)
Y <- rpois(n, exp(p))       # Poisson count response
## l1 regularization
fitted.l1 <- picasso(X, Y, nlambda = 100, family = "poisson", method = "l1")
## mcp regularization
fitted.mcp <- picasso(X, Y, nlambda = 100, family = "poisson", method = "mcp")
## scad regularization
fitted.scad <- picasso(X, Y, nlambda = 100, family = "poisson", method = "scad")
## lambdas used along the regularization path
print(fitted.l1$lambda)
## number of nonzero coefficients for each lambda
print(fitted.l1$df)
## coefficients and intercept for the i-th lambda
i <- 30
print(fitted.l1$lambda[i])
print(fitted.l1$beta[, i])
print(fitted.l1$intercept[i])
## Visualize the solution path
plot(fitted.l1)
# }
# Run the code above in your browser using DataLab