1. Generating data
require(MASS)
## Loading required package: MASS
## Warning: package 'MASS' was built under R version 3.5.3
require(glmnet)
## Loading required package: glmnet
## Warning: package 'glmnet' was built under R version 3.5.3
## Loading required package: Matrix
## Loading required package: foreach
## Warning: package 'foreach' was built under R version 3.5.3
## Loaded glmnet 2.0-18
data.gen = function(n_data){
    x1 = rnorm(n_data)
    x2 = rnorm(n_data)
    x = cbind(x1, x2)
    # diagonal covariance matrix; returned for reference but not used below,
    # so the generated errors are homoskedastic standard normals
    sigma = diag(1 + x1^2/2)
    eps = rnorm(n_data)
    # true model: intercept 1, slopes 5 and 10 (no zero coefficients)
    y = 1 + 5 * x1 + 10 * x2 + eps
    ret = list(x = x, y = y, sigma = sigma)
    return(ret)
}
data = data.gen(100)
x = data$x
y = data$y
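Note that data.gen builds the covariance matrix sigma but never uses it, so the errors above are homoskedastic. If heteroskedastic errors were the intent (which would also explain loading MASS), one possible variant is sketched below; data.gen.hetero is a hypothetical name and this version is not used in any of the results that follow.

data.gen.hetero = function(n_data){
    x1 = rnorm(n_data)
    x2 = rnorm(n_data)
    x = cbind(x1, x2)
    sigma = diag(1 + x1^2/2)
    # draw errors with Var(eps_i) = 1 + x1_i^2/2 instead of a standard normal
    eps = MASS::mvrnorm(n = 1, mu = rep(0, n_data), Sigma = sigma)
    y = 1 + 5 * x1 + 10 * x2 + eps
    list(x = x, y = y, sigma = sigma)
}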
2. Fit OLS
# OLS fit; its coefficients serve as pilot estimates for the adaptive-lasso weights
lm.fit <- lm(y ~ x)
beta.init <- coef(lm.fit)
3. Calculate weights
gamma = 2
# adaptive-lasso weights w_j = 1/|beta_hat_j|^gamma; drop the intercept,
# since glmnet's penalty.factor applies only to the columns of x
w <- 1/abs(beta.init[-1])^gamma
4. Fit adaptive lasso
# lasso: equal penalty on both predictors, lambda chosen by 10-fold CV
lasso.fit <- cv.glmnet(x, y, family = "gaussian", alpha = 1, standardize = FALSE, nfolds = 10)
beta <- predict(lasso.fit, x, type="coefficients", s="lambda.min")

# adaptive lasso: same fit, but with coefficient-specific penalty weights w
adalasso.fit <- cv.glmnet(x, y, family = "gaussian", alpha = 1, standardize = FALSE, nfolds = 10, penalty.factor=w)
ada.beta <- predict(adalasso.fit, x, type="coefficients", s="lambda.min")
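The penalty.factor argument is equivalent, up to a rescaling of lambda, to dividing each column of x by its weight, fitting an ordinary lasso, and scaling the coefficients back. A minimal sketch of this reweighting view follows (z, equiv.fit, c.hat and b.hat are illustrative names); because the lambda grids differ, the cross-validated fits need not match exactly, but the underlying optimization problems coincide.

# with z_j = x_j / w_j and c_j = w_j * b_j, sum_j w_j|b_j| = sum_j |c_j|,
# so an ordinary lasso on z solves the weighted (adaptive) problem
z <- sweep(x, 2, w, "/")
equiv.fit <- cv.glmnet(z, y, family = "gaussian", alpha = 1, standardize = FALSE, nfolds = 10)
c.hat <- as.matrix(coef(equiv.fit, s = "lambda.min"))[, 1]
b.hat <- c(c.hat[1], c.hat[-1] / w)   # coefficients back on the original x scale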
5. Compare Lasso vs Adaptive Lasso
par(mfrow=c(2,2))
plot(lasso.fit$lambda, ylab='lambda',main="LASSO")
plot(adalasso.fit$lambda, ylab='lambda',main='Adaptive LASSO')
plot(lasso.fit, main ='LASSO')
plot(adalasso.fit, main='Adaptive LASSO')
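A numeric side-by-side of the two coefficient vectors (a small convenience added here, not part of the original output) complements the plots:

# coefficient estimates at each fit's cross-validated lambda.min
cbind(LASSO = as.matrix(beta)[, 1], AdaLASSO = as.matrix(ada.beta)[, 1])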

6. Simulation
set.seed(1248)
iter = 500
n_data = 100
adalasso = data.frame(b0=vector(length = iter),b1=vector(length = iter),b2=vector(length = iter))
ols = data.frame(b0=vector(length = iter),b1=vector(length = iter),b2=vector(length = iter))
lasso = data.frame(b0=vector(length = iter),b1=vector(length = iter),b2=vector(length = iter))
for (i in 1:iter){
    data = data.gen(n_data)
    x = data$x
    y = data$y
    #ols
    lm.fit <- lm(y ~ x)
    beta.init <- coef(lm.fit)
    gamma = 0.5   # note: a smaller gamma than in the single-fit example above
    w <- 1/abs(beta.init[-1])^gamma   # weights for the predictors only (intercept dropped)
    #lasso
    lasso.fit <- cv.glmnet(x, y, family = "gaussian", alpha = 1, standardize = FALSE, nfolds = 10)
    beta <- predict(lasso.fit, x, type="coefficients", s="lambda.min")

    #adalasso
    adalasso.fit <- cv.glmnet(x, y, family = "gaussian", alpha = 1, standardize = FALSE, nfolds = 10, penalty.factor=w)
    ada.beta <- predict(adalasso.fit, x, type="coefficients", s="lambda.min")
    
    lasso$b0[i] = beta[1]
    lasso$b1[i] = beta[2]
    lasso$b2[i] = beta[3]
    adalasso$b0[i] = ada.beta[1]
    adalasso$b1[i] = ada.beta[2]
    adalasso$b2[i] = ada.beta[3]
    ols$b0[i] = as.numeric(beta.init[1])
    ols$b1[i] = as.numeric(beta.init[2])
    ols$b2[i] = as.numeric(beta.init[3])
}
cat('\t \tMean and Variance of LASSO')
##      Mean and Variance of LASSO
sapply(lasso,mean)
##       b0       b1       b2 
## 1.000559 4.944968 9.947987
sapply(lasso,var)
##          b0          b1          b2 
## 0.010993739 0.009569337 0.010083335
cat('\t \tMean and Variance of OLS')
##      Mean and Variance of OLS
sapply(ols,mean)
##        b0        b1        b2 
##  1.000102  4.997954 10.001356
sapply(ols,var)
##          b0          b1          b2 
## 0.010972358 0.009579694 0.010128058
cat('\t \tMean and Variance of Adaptive Lasso')
##      Mean and Variance of Adaptive Lasso
sapply(adalasso,mean)
##       b0       b1       b2 
## 1.000360 4.929305 9.970705
sapply(adalasso,var)
##         b0         b1         b2 
## 0.01105735 0.00951519 0.01011021
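The mean and variance summaries can be combined into a mean squared error against the true coefficients (1, 5, 10); true.beta and mse below are names introduced only for this sketch.

true.beta <- c(b0 = 1, b1 = 5, b2 = 10)
mse <- function(est) colMeans(sweep(as.matrix(est), 2, true.beta)^2)   # roughly bias^2 + variance per coefficient
rbind(OLS = mse(ols), LASSO = mse(lasso), AdaLASSO = mse(adalasso))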
par(mfrow = c(3,1))
for (i in 1:ncol(ols)){
    main = paste0('Box plot of b', i - 1)
    boxplot(ols[,i], lasso[,i], adalasso[,i], main = main, names = c('OLS','LASSO','Adaptive LASSO'))
}

par(mfrow = c(3,3))
hist(ols$b0)
hist(ols$b1)
hist(ols$b2)
hist(lasso$b0)
hist(lasso$b1)
hist(lasso$b2)
hist(adalasso$b0)
hist(adalasso$b1)
hist(adalasso$b2)

7. Variance: OLS vs Adaptive Lasso vs Lasso
sum(sapply(lasso,var)-sapply(adalasso,var))
## [1] -3.633935e-05
sum(sapply(ols,var) -sapply(adalasso,var))
## [1] -2.641705e-06
sum(sapply(ols,var) -sapply(lasso,var))
## [1] 3.369765e-05
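The three sums aggregate the differences across coefficients; a per-coefficient table (added here as a convenience) makes the comparison easier to read.

# per-coefficient sampling variance across the 500 replications
rbind(OLS = sapply(ols, var), LASSO = sapply(lasso, var), AdaLASSO = sapply(adalasso, var))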
8. Result: OLS > LASSO > Adaptive LASSO in this setting. OLS is unbiased and its simulated means sit closest to the true coefficients, while both lasso variants shrink the slopes slightly; the variance differences above are on the order of 1e-5 and therefore negligible. With both true slopes nonzero and large, there is no sparsity for the lasso penalties to exploit here.