ridge <- function(w, X, y, lambda = .1) {
  # X: model matrix
  # y: target
  # lambda: penalty parameter
  # w: the weights/coefficients
  crossprod(y - X %*% w) + lambda * length(y) * crossprod(w)
}

set.seed(8675309)

N = 500
p = 10
X = scale(matrix(rnorm(N * p), ncol = p))
b = c(.5, -.5, .25, -.25, .125, -.125, rep(0, 4))
y = scale(X %*% b + rnorm(N, sd = .5))

#' Note, if `lambda = 0`, the result is the same as `lm.fit`.

result_ridge = optim(
  rep(0, ncol(X)),
  ridge,
  X      = X,
  y      = y,
  lambda = .1,
  method = 'BFGS'
)

#' Analytical result.

result_ridge2 = solve(crossprod(X) + diag(length(y) * .1, ncol(X))) %*% crossprod(X, y)

#' Alternative with augmented data (note sigma is ignored as it equals 1, but
#' otherwise use X/sigma and y/sigma).

X2 = rbind(X, diag(sqrt(length(y) * .1), ncol(X)))
y2 = c(y, rep(0, ncol(X)))

result_ridge3 = solve(crossprod(X2)) %*% crossprod(X2, y2)

#' `glmnet` uses by default a mixture of ridge and lasso penalties; setting
#' `alpha = 1` reduces it to the lasso, while `alpha = 0` would be ridge.

library(glmnet)
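
#' As a rough sketch (not part of the original), a ridge fit with `glmnet` for
#' comparison. `glmnet` minimizes RSS/(2N) plus the penalty and standardizes by
#' default, so `lambda = .1` only approximately corresponds to the penalty in
#' the objective above; the coefficients should be close, but not identical, to
#' the other three results. The object names below are illustrative assumptions.

fit_glmnet = glmnet(
  X,
  y,
  alpha  = 0,   # alpha = 0 gives the pure ridge penalty
  lambda = .1   # assumed value; glmnet's penalty scaling differs slightly
)

# Compare estimates from optim, the analytical solution, the augmented-data
# approach, and glmnet (intercept dropped).
data.frame(
  optim      = result_ridge$par,
  analytical = result_ridge2[, 1],
  augmented  = result_ridge3[, 1],
  glmnet     = coef(fit_glmnet)[-1, 1]
)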