set.seed(100) # This may be removed; it only ensures consistency of the daily tests
## Simulate from Y = XB + E, where Y is N x K, X is N x p, and B is p x K
N <- 50 # number of samples
p <- 50 # number of features
K <- 25 # number of groups
B <- matrix(sample(c(rep(1, p*K*0.1), rep(0, p*K - as.integer(p*K*0.1)))), nrow = p, ncol = K)
X <- matrix(rnorm(N*p, 1, 1), nrow = N, ncol = p)
Y <- X %*% B + matrix(rnorm(N*K, 0, 1), N, K)
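## A quick sanity check on the simulated data (a sketch, not part of the
## original example): confirm the dimensions and the density of B.
dim(X)       # N x p = 50 x 50
dim(Y)       # N x K = 50 x 25
mean(B != 0) # fraction of nonzero entries in B, 0.1 by construction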
lambda <- lsgl.lambda(X, Y, alpha = 1, lambda.min = .5, intercept = FALSE)
fit <- lsgl(X, Y, alpha = 1, lambda = lambda, intercept = FALSE)
## ||B - beta||_F^2, the squared Frobenius distance to the true B, for each estimate
sapply(fit$beta, function(beta) sum((B - beta)^2))
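## A possible follow-up (a sketch, not part of the original example): pick the
## lambda whose estimate is closest to the true B in squared Frobenius norm,
## assuming fit$beta holds one estimate per value in the lambda sequence.
frob.err <- sapply(fit$beta, function(beta) sum((B - as.matrix(beta))^2))
best <- which.min(frob.err)
lambda[best]   # lambda value with the smallest estimation error
frob.err[best] # the corresponding squared Frobenius error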
## Plot
par(mfrow = c(3,1))
image(B, main = "True B")
image(as.matrix(fit$beta[[100]]), main = "Lasso estimate (lambda = 0.5)")
image(solve(t(X) %*% X) %*% t(X) %*% Y, main = "Least squares estimate")
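## For comparison (a sketch, not part of the original example): the sparse
## group lasso estimate should use far fewer nonzero coefficients than the
## dense least squares solution.
sum(fit$beta[[100]] != 0) # nonzero coefficients in the lasso estimate
sum(B != 0)               # nonzero entries in the true B (125)
p * K                     # coefficients in the dense least squares estimate (1250)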
# The training error of the models
Err(fit, X)
# In this case this is simply the loss function, up to scaling
1 / (sqrt(N) * K) * sqrt(fit$loss)
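## As a cross-check (a sketch; this assumes fit$loss is the squared Frobenius
## norm of the residuals, which may not match the package's exact scaling):
sapply(fit$beta,
       function(beta) sqrt(sum((Y - X %*% as.matrix(beta))^2)) / (sqrt(N) * K))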