lasso <- function(
  X,               # model matrix
  y,               # target
  lambda = .1,     # penalty parameter
  soft   = TRUE,   # soft vs. hard thresholding
  tol    = 1e-6,   # tolerance
  iter   = 100,    # number of max iterations
  verbose = TRUE   # print out iteration number
) {
  
  # soft thresholding function
  soft_thresh <- function(a, b) {
    out = rep(0, length(a))
    out[a >  b] = a[a >  b] - b
    out[a < -b] = a[a < -b] + b
    out
  }
  
  # ridge estimate as starting values for the weights
  w = solve(crossprod(X) + diag(lambda, ncol(X))) %*% crossprod(X, y)
  
  tol_curr = 1
  J  = ncol(X)
  a  = rep(0, J)
  c_ = rep(0, J)
  i  = 1
  
  while (tol < tol_curr && i < iter) {
    w_old = w
    a = colSums(X^2)
    l = length(y) * lambda   # for consistency with glmnet approach
    
    # partial residual for each coordinate, holding the other weights fixed
    c_ = sapply(1:J, function(j) sum( X[, j] * (y - X[, -j] %*% w_old[-j]) ))
    
    if (soft) {
      # soft thresholding: shrink toward zero and select
      for (j in 1:J) {
        w[j] = soft_thresh(c_[j] / a[j], l / a[j])
      }
    }
    else {
      # hard thresholding: zero out small coefficients, no shrinkage
      w = w_old
      w[c_ < l & c_ > -l] = 0
    }
    
    tol_curr = crossprod(w - w_old)
    i = i + 1
    
    if (verbose && i %% 10 == 0) message(i)
  }
  
  w
}

#' # Data setup
#' 

set.seed(8675309)

N = 500
p = 10
X = scale(matrix(rnorm(N * p), ncol = p))
b = c(.5, -.5, .25, -.25, .125, -.125, rep(0, p - 6))
y = scale(X %*% b + rnorm(N, sd = .5))

lambda = .1

# debugonce(lasso)

#' Note that if `lambda = 0`, the result is the same as `lm.fit`.
#' 

result_soft = lasso(
  X,
  y,
  lambda = lambda,
  tol    = 1e-12,
  soft   = TRUE
)

result_hard = lasso(
  X,
  y,
  lambda = lambda,
  tol    = 1e-12,
  soft   = FALSE
)

#' `glmnet` uses a mixture of ridge and lasso penalties by default; setting
#' `alpha = 1` reduces it to the lasso (`alpha = 0` would be ridge). We supply a
#' couple of lambda values while only wanting the one set to the same lambda
#' value as above (via the `s` argument). A sketch of that comparison follows.

library(glmnet)
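#' The call below is a minimal sketch of that comparison, not necessarily the
#' exact settings used elsewhere: the lambda grid, `thresh`, and the intercept
#' handling are assumptions chosen to mirror the setup above, where the data are
#' already centered and scaled.

fit_glmnet = glmnet(
  X,
  y,
  alpha     = 1,                 # pure lasso penalty
  lambda    = c(10, 1, lambda),  # small descending lambda grid; assumed values
  thresh    = 1e-12,             # tight convergence tolerance, comparable to above
  intercept = FALSE              # data are already centered/scaled
)

# coefficients at the lambda of interest
result_glmnet = coef(fit_glmnet, s = lambda)

# side-by-side comparison of the estimates
data.frame(
  soft   = result_soft[, 1],
  hard   = result_hard[, 1],
  glmnet = result_glmnet[-1, 1],   # drop the (zero) intercept row
  truth  = b
)

#' Recall that the coordinate descent above multiplies lambda by `length(y)`,
#' matching `glmnet`'s scaling of the penalty by the sample size, so the two
#' solutions should be on a comparable scale.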