Code

library(ggplot2)      # plotting
library(patchwork)    # side-by-side ggplots
library(directlabels) # in-plot line labels
library(lars)         # lasso / LAR paths
library(glmnet)       # ridge / lasso with cross-validation
library(HDInterval)   # highest posterior density intervals


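# Final() runs the whole analysis: a simulation study comparing Bayesian
# empirical likelihood (BEL), Bayesian MCMC and frequentist shrinkage
# estimates, followed by an application to the diabetes data.
# The *_iter / *_burn arguments set the chain length and burn-in for each part.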
Final <- function(simulation_iter, simulation_burn, realdata_iter, realdata_burn){
  simulate <- function(n){
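    # data-generating model: y = 3*x1 + x2 + 0.2*x3 - 0.5*x4 + eps,
    # eps ~ N(0.1, 2); the true coefficients reappear below as `thetas`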
    xi1 <- rnorm(n, 2, 0.5)
    xi2 <- runif(n, -0.1, 0.1)
    xi3 <- rgamma(n, 2, 198)
    xi4 <- rnorm(n, 5, 1)
    eps <- rnorm(n, 0.1, 2)
    yi  <- 3*xi1+xi2+0.2*xi3-0.5*xi4+eps
    cbind(y=yi, x1=xi1, x2=xi2, x3=xi3, x4=xi4)
  }
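  # res() does all the work; Final() simply calls it and returns the tables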
  res <- function(){
    mcmcbayes <- function(xvar, yvar, start, n_iters=simulation_iter, burnin=simulation_burn, prior='gamma'){
      # used for calculating the log likelihood 
      sd_residuals <- function(b_est, m_est, xvar, yvar){
        y_preds <- xvar*m_est + b_est   # treats xvar as a single predictor
        resids <- yvar - y_preds
        sd(resids)
      }
      # b = intercept
      # m = x coefficient
      # compare to: `logLik(lm(yvar~xvar))`
      loglik <- function(b_est, m_est, xvar, yvar){
        y_preds <- xvar*m_est + b_est
        likhood <- dnorm(yvar, mean=y_preds, sd=sd_residuals(b_est, m_est, xvar, yvar), log=TRUE)
        sum(likhood)
      }
      ### *** ###
      # draw the shrinkage parameter from the chosen prior family; the
      # hyperparameters (p, r, d, tau, eta1, eta2, nu1, nu2) are placeholders
      # and should be set to match the prior actually intended
      sample_prior <- function(name, p=1, r=1, d=1, tau=1,
                               eta1=0, eta2=1, nu1=1, nu2=1){
        if(name=='gamma'){
          rgamma(1, p+r, d+0.5*sum(tau^2))
        }else if(name=='uniform'){
          runif(1, eta1, eta2)
        }else if(name=='beta'){
          rbeta(1, nu1, nu2)
        }
      }
      # empirical-likelihood form of the likelihood, kept for reference;
      # the Metropolis-Hastings updates below use loglik() instead
      loglik_bel <- function(Lambda, Theta, xvar, yvar){
        a1 <- log(sample_prior(name=prior))
        a2 <- sum(log(1 + Lambda %*% xvar %*% (as.matrix(yvar) - xvar %*% as.matrix(Theta))))
        exp(a1 - a2)
      }
      # play around with `b_params` and `m_params`; different shapes can also
      # be tried for the priors (e.g. dbeta())
      log_prior <- function(b_est, m_est, b_params=c(0,5), m_params=c(-10,10)){
        # normal prior on the intercept, uniform prior on the slope
        # (slope support includes negative values)
        prior_b <- dnorm(b_est, mean=b_params[1], sd=b_params[2], log=TRUE)
        prior_m <- dunif(m_est, min=m_params[1], max=m_params[2], log=TRUE)
        # the gamma/uniform/beta families differ only in how the shrinkage
        # parameter is drawn, which sample_prior() handles
        alpha <- sample_prior(name=prior)
        prior_b + prior_m + alpha
      }
      posterior <- function(b_est, m_est, xvar, yvar){
        loglikhood <- loglik(b_est, m_est, xvar, yvar)
        combined_logprior <- log_prior(b_est, m_est)
        loglikhood + combined_logprior
      }
      # play around with the `sd` parameters for generating new values 
      generate_new_values <- function(b_est, m_est){
        rnorm(2, mean=c(b_est, m_est), sd=c(.5,.1))
      }
      # implements the Metropolis-Hastings MCMC algorithm:
      # iteratively proposes new parameter values and accepts or rejects them
      # according to the posterior ratio
      mh_mcmc <- function(xvar, yvar, start, n_iters=simulation_iter, burnin=simulation_burn){
        # initialize the chain 
        chain <- matrix(NA_real_, nrow=n_iters+1, ncol=2)
        chain[1, ] <- start[1:2]  # the sampler tracks (intercept, slope)
        # for each iteration: 
        for (x in 1:n_iters){
          # guess some values for `m` and `b` 
          proposal <- generate_new_values(b_est=chain[x,1], m_est=chain[x,2])
          # unnormalized probabilities of new and most recent values 
          proposal_prob <- posterior(b_est=proposal[1], m_est=proposal[2], xvar, yvar)
          current_prob <- posterior(b_est=chain[x,1], m_est=chain[x,2], xvar, yvar)
          # 'prob' that new vals are more likely than current chain vals 
          # (NOTE: NOT A TRUE PROBABILITY -- CAN BE ABOVE 1 BUT USUALLY DOESNT MATTER)
          prob <- exp(proposal_prob - current_prob)
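          # posterior() returns log densities, so exp() of the difference is
          # the posterior ratio used by the Metropolis acceptance rule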
          # get a random probability 
          random_prob <- runif(1, min=0, max=1)
          # if prob for new value is better than random prob, accept proposal 
          if (random_prob < prob){
            chain[x+1, ] <- proposal
          } else {
            # otherwise retain most recent value 
            chain[x+1, ] <- chain[x, ]
          }
        }
        colnames(chain) <- c("theta_b", "theta_m")
        # throw out the first `burnin` iterations before print 
        if (!is.null(burnin)){
          print(chain[(floor(burnin*nrow(chain))+1) : nrow(chain), ])
        } else {
          print(chain)  
        }
      }
      print(chain)
    }
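    # ---- simulation study ----
    # for each sample size and each shrinkage level alpha, record how far the
    # BEL, Bayes and frequentist estimates land from the target alpha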
    alphas <- rep(c(0, 0.5, 1, 1.5, 2, 3), 2)  # first six runs lasso, last six ridge
    thetas <- c(3, 1, 0.2, -0.5)               # true coefficients from simulate()
    n1 <- 20; n2 <- 40; n3 <- 100              # sample sizes
    df1 <- simulate(n=n1)
    df2 <- simulate(n=n2)
    df3 <- simulate(n=n3)
    x1 <- df1[,2:5]
    y1 <- df1[,1]
    x2 <- df2[,2:5]
    y2 <- df2[,1]
    x3 <- df3[,2:5]
    y3 <- df3[,1]
    ii1 <- c()
    for (i in 1:length(alphas)) {
      alpha <- alphas[i]
      jj <- c()
      for (j in 1:length(thetas)) {
        theta <- thetas[j]
        if(i <= 6){
          # lasso: lars fits the whole path, so its lambda sequence is
          # compared with the target shrinkage alpha
          fit1 <- lars(x1, y1, type="lasso")
          mse1 <- mean((fit1$lambda - alpha)^2)
          fit2 <- mcmcbayes(x1, y1, start = thetas, n_iters = simulation_iter, burnin = simulation_burn)
          mse2 <- mean((apply(fit2, 2, mean)-alpha)^2)
          # the cross-validated penalty lambda.min is the frequentist estimate
          fit3 <- cv.glmnet(x1, y1, alpha = 1, nfolds = 5)
          mse3 <- mean((fit3$lambda.min - alpha)^2)
        }else{
          # ridge: glmnet with alpha = 0
          fit1 <- cv.glmnet(x1, y1, alpha = 0, nfolds = 5)
          mse1 <- mean((fit1$lambda.min - alpha)^2)
          fit2 <- mcmcbayes(x1, y1, start = thetas, n_iters = simulation_iter, burnin = simulation_burn)
          mse2 <- mean((apply(fit2, 2, mean)-alpha)^2)
          fit3 <- cv.glmnet(x1, y1, alpha = 0, nfolds = 5)
          mse3 <- mean((fit3$lambda.min - alpha)^2)
        }
        jj <- c(jj, mse1, mse2, mse3)
      }
      ii1 <- rbind(ii1, jj)
    }
    
    ii2 <- c()
    for (i in 1:length(alphas)) {
      alpha <- alphas[i]
      jj <- c()
      for (j in 1:length(thetas)) {
        theta <- thetas[j]
        if(i <= 6){
          fit1 <- lars(x2, y2, type="lasso")
          mse1 <- mean((fit1$lambda - alpha)^2)
          fit2 <- mcmcbayes(x2, y2, start = thetas, n_iters = simulation_iter, burnin = simulation_burn)
          mse2 <- mean((apply(fit2, 2, mean)-alpha)^2)
          fit3 <- cv.glmnet(x2, y2, alpha = 1, nfolds = 5)
          mse3 <- mean((fit3$lambda.min - alpha)^2)
        }else{
          fit1 <- cv.glmnet(x2, y2, alpha = 0, nfolds = 5)
          mse1 <- mean((fit1$lambda.min - alpha)^2)
          fit2 <- mcmcbayes(x2, y2, start = thetas, n_iters = simulation_iter, burnin = simulation_burn)
          mse2 <- mean((apply(fit2, 2, mean)-alpha)^2)
          fit3 <- cv.glmnet(x2, y2, alpha = 0, nfolds = 5)
          mse3 <- mean((fit3$lambda.min - alpha)^2)
        }
        jj <- c(jj, mse1, mse2, mse3)
      }
      ii2 <- rbind(ii2, jj)
    }
    
    ii3 <- c()
    for (i in 1:length(alphas)) {
      alpha <- alphas[i]
      jj <- c()
      for (j in 1:length(thetas)) {
        theta <- thetas[j]
        if(i <= 6){
          fit1 <- lars(x3, y3, type="lasso")
          mse1 <- mean((fit1$lambda - alpha)^2)
          fit2 <- mcmcbayes(x3, y3, start = thetas, n_iters = simulation_iter, burnin = simulation_burn)
          mse2 <- mean((apply(fit2, 2, mean)-alpha)^2)
          fit3 <- cv.glmnet(x3, y3, alpha = 1, nfolds = 5)
          mse3 <- mean((fit3$lambda.min - alpha)^2)
        }else{
          fit1 <- cv.glmnet(x3, y3, alpha = 0, nfolds = 5)
          mse1 <- mean((fit1$lambda.min - alpha)^2)
          fit2 <- mcmcbayes(x3, y3, start = thetas, n_iters = simulation_iter, burnin = simulation_burn)
          mse2 <- mean((apply(fit2, 2, mean)-alpha)^2)
          fit3 <- cv.glmnet(x3, y3, alpha = 0, nfolds = 5)
          mse3 <- mean((fit3$lambda.min - alpha)^2)
        }
        jj <- c(jj, mse1, mse2, mse3)
      }
      ii3 <- rbind(ii3, jj)
    }
    tab1 <- rbind(ii1,ii2, ii3)
    tab11 <- cbind(rep(c(20,40,100),each=12),rep(c(0,0.5,1,1.5,2,3),6), tab1)
    methods <- rep(c('Lasso','Ridge'),each=6, times=3)
    colnames(tab11) <- c('n','shrinkage',rep(c('BEL','Bayes','Freq'), 4))
    table1 <- rbind(c(NA, NA, rep(c(3, 1, 0.2, -0.5), each = 3)), tab11)
    rownames(table1) <- c('theta', methods)
    ##############################################
    ii1 <- c()
    for (i in 1:length(alphas)) {
      alpha <- alphas[i]
      jj <- c()
      for (j in 1:length(thetas)) {
        theta <- thetas[j]
        if(i <= 6){
          fit1 <- lars(x1, y1, type="lasso")
          mbias1 <- mean(fit1$lambda - alpha)
          fit2 <- mcmcbayes(x1, y1, start = thetas, n_iters = simulation_iter, burnin = simulation_burn)
          mbias2 <- mean(apply(fit2, 2, mean)-alpha)
          fit3 <- cv.glmnet(x1, y1, alpha = 1, nfolds = 5)
          mbias3 <- mean(fit3$lambda.min - alpha)
        }else{
          fit1 <- cv.glmnet(x1, y1, alpha = 0, nfolds = 5)
          mbias1 <- mean(fit1$lambda.min - alpha)
          fit2 <- mcmcbayes(x1, y1, start = thetas, n_iters = simulation_iter, burnin = simulation_burn)
          mbias2 <- mean(apply(fit2, 2, mean)-alpha)
          fit3 <- cv.glmnet(x1, y1, alpha = 0, nfolds = 5)
          mbias3 <- mean(fit3$lambda.min - alpha)
        }
        jj <- c(jj, mbias1, mbias2, mbias3)
      }
      ii1 <- rbind(ii1, jj)
    }
    
    ii2 <- c()
    for (i in 1:length(alphas)) {
      alpha <- alphas[i]
      jj <- c()
      for (j in 1:length(thetas)) {
        theta <- thetas[j]
        if(i <= 6){
          fit1 <- lars(x2, y2, type="lasso")
          mbias1 <- mean(fit1$lambda - alpha)
          fit2 <- mcmcbayes(x2, y2, start = thetas, n_iters = simulation_iter, burnin = simulation_burn)
          mbias2 <- mean(apply(fit2, 2, mean)-alpha)
          fit3 <- cv.glmnet(x2, y2, alpha = 1, nfolds = 5)
          mbias3 <- mean(fit3$lambda.min - alpha)
        }else{
          fit1 <- cv.glmnet(x2, y2, alpha = 0, nfolds = 5)
          mbias1 <- mean(fit1$lambda.min - alpha)
          fit2 <- mcmcbayes(x2, y2, start = thetas, n_iters = simulation_iter, burnin = simulation_burn)
          mbias2 <- mean(apply(fit2, 2, mean)-alpha)
          fit3 <- cv.glmnet(x2, y2, alpha = 0, nfolds = 5)
          mbias3 <- mean(fit3$lambda.min - alpha)
        }
        jj <- c(jj, mbias1, mbias2, mbias3)
      }
      ii2 <- rbind(ii2, jj)
    }
    
    ii3 <- c()
    for (i in 1:length(alphas)) {
      alpha <- alphas[i]
      jj <- c()
      for (j in 1:length(thetas)) {
        theta <- thetas[j]
        if(i <= 6){
          fit1 <- lars(x3, y3, type="lasso")
          mbias1 <- mean(fit1$lambda - alpha)
          fit2 <- mcmcbayes(x3, y3, start = thetas, n_iters = simulation_iter, burnin = simulation_burn)
          mbias2 <- mean(apply(fit2, 2, mean)-alpha)
          fit3 <- cv.glmnet(x3, y3, alpha = 1, nfolds = 5)
          mbias3 <- mean(fit3$lambda.min - alpha)
        }else{
          fit1 <- cv.glmnet(x3, y3, alpha = 0, nfolds = 5)
          mbias1 <- mean(fit1$lambda.min - alpha)
          fit2 <- mcmcbayes(x3, y3, start = thetas, n_iters = simulation_iter, burnin = simulation_burn)
          mbias2 <- mean(apply(fit2, 2, mean)-alpha)
          fit3 <- cv.glmnet(x3, y3, alpha = 0, nfolds = 5)
          mbias3 <- mean(fit3$lambda.min - alpha)
        }
        jj <- c(jj, mbias1, mbias2, mbias3)
      }
      ii3 <- rbind(ii3, jj)
    }
    tab2 <- rbind(ii1,ii2, ii3)
    tab22 <- cbind(rep(c(20,40,100),each=12),rep(c(0,0.5,1,1.5,2,3),6), tab2)
    methods <- rep(c('Lasso','Ridge'),each=6, times=3)
    colnames(tab22) <- c('n','shrinkage',rep(c('BEL','Bayes','Freq'), 4))
    table2 <- rbind(c(NA, NA, rep(c(3, 1, 0.2, -0.5), each = 3)), tab22)
    rownames(table2) <- c('theta',methods)
    #---------------------------------------------
    # Diabetes Data
    # http://statweb.lsu.edu/faculty/li/teach/exst7142/diabetes.html
    loc <- "https://www4.stat.ncsu.edu/~boos/var.select/diabetes.tab.txt"
    dat <- read.table(loc,header=T,sep="\t")
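    # 442 patients; 10 baseline predictors (AGE, SEX, BMI, BP, S1-S6) and the
    # disease-progression response in column 11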
    # training and testing
    set.seed(2)
    i.train <- sample(1:nrow(dat), size=round(0.5*nrow(dat)), replace = F)
    dat.train <- dat[i.train,]
    dat.test  <- dat[-i.train,]
    x <- as.matrix(dat.train[,1:10])
    y <- dat.train[,11]
    diabetes_map_lm <- lm(y ~ x)
    diabetes_map_ridge <- glmnet(x, y, alpha = 0)  # alpha = 0 is ridge in glmnet
    diabetes_map_lasso <- glmnet(x, y, alpha = 1)  # alpha = 1 is lasso
    thetas.start <- diabetes_map_lm$coefficients[-1]
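    # the OLS slopes (intercept dropped) seed the MCMC chains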
    x <- as.matrix(dat.test[,1:10])
    y <- dat.test[,11]
    fit.train1 <- mcmcbayes(x,y, start = thetas.start, n_iters = realdata_iter, burnin = realdata_burn, prior = 'gamma')
    fit.train2 <- mcmcbayes(x,y, start = thetas.start, n_iters = realdata_iter, burnin = realdata_burn, prior = 'uniform')
    fit.train3 <- mcmcbayes(x,y, start = thetas.start, n_iters = realdata_iter, burnin = realdata_burn, prior = 'beta')
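    # the code below assumes an 11-column chain: 10 coefficient draws plus the
    # shrinkage parameter in column 11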
    shrink.post.1 <- mean(fit.train1[,11])
    shrink.post.2 <- mean(fit.train2[,11])
    shrink.post.3 <- mean(fit.train3[,11])
    table3 <- cbind(shrink.post.1, shrink.post.2, shrink.post.3)
    colnames(table3) <- c('Gamma','Uniform','Beta')
    rownames(table3) <- 'alpha_lasso'
    #---------------------------------------------
    post_params1 <- apply(fit.train1[,-11], 2, mean)
    post_params2 <- apply(fit.train2[,-11], 2, mean)
    post_params3 <- apply(fit.train3[,-11], 2, mean)
    table4 <- rbind(post_params1, post_params2, post_params3)
    colnames(table4) <- colnames(dat)[-ncol(dat)]
    rownames(table4) <- c('Gamma','Uniform','Beta')
    #------------------------------------------------
    HPD <- function(x) hdi(x, credMass=0.95)
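    # hdi() from HDInterval returns the shortest interval containing 95% of
    # the posterior draws; apply() gives one (lower, upper) pair per coefficient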
    hpdg <- apply(fit.train1[,-11], 2, HPD)
    HPD_g <- t(hpdg)
    hpdu <- apply(fit.train2[,-11], 2, HPD)
    HPD_u <- t(hpdu)
    hpdb <- apply(fit.train3[,-11], 2, HPD)
    HPD_b <- t(hpdb)
    table5_hpd <- data.frame(Variables = colnames(dat)[-ncol(dat)], HPD_g, HPD_u, HPD_b)
    colnames(table5_hpd) <- c('Variables','Gamma_LO','Gamma_UP','Uniform_LO','Uniform_UP','Beta_LO','Beta_UP')
    rownames(table5_hpd) <- NULL
    #------------------------------------------------
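    # equal-tailed 95% credible intervals from posterior quantiles, for
    # comparison with the HPD intervals above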
    CRI <- function(x) quantile(x, probs = c(.025,.975))
    CRI_g <- t(apply(fit.train1[,-11], 2, CRI))  # transpose: one row per variable
    CRI_u <- t(apply(fit.train2[,-11], 2, CRI))
    CRI_b <- t(apply(fit.train3[,-11], 2, CRI))
    table5_cri <- data.frame(Variables = colnames(dat)[-ncol(dat)], CRI_g, CRI_u, CRI_b)
    colnames(table5_cri) <- c('Variables','Gamma_LO','Gamma_UP','Uniform_LO','Uniform_UP','Beta_LO','Beta_UP')
    rownames(table5_cri) <- NULL
    #---------------------------------------------------
    alpha.vals <- c(0,0.5,1,5,10,50,200,500,1000,2000)
    BEL <- matrix(NA, nrow = length(alpha.vals), ncol=10)
    BL  <- matrix(NA, nrow = length(alpha.vals), ncol=10)
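    # coefficient paths across the penalty grid: BEL from the lars path on the
    # training split, BL from posterior means of the sampler on the test split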
    for (i in 1:length(alpha.vals)) {
      x <- as.matrix(dat.train[,1:10])
      y <- dat.train[,11]
      fit.lar <- lars(x, y, type="lar")
      # per-variable coefficients at penalty alpha.vals[i]; lars itself takes
      # no lambda argument, so coef() extracts them from the fitted path
      BEL[i,] <- coef(fit.lar, s = alpha.vals[i], mode = "lambda")
      x <- as.matrix(dat.test[,1:10])
      y <- dat.test[,11]
      fit.mcmc <- mcmcbayes(x, y, start = thetas.start, n_iters = realdata_iter, burnin = realdata_burn)
      BL[i,] <- apply(fit.mcmc[,-11], 2, mean)
    }
    colnames(BEL) <- colnames(dat)[-ncol(dat)]
    colnames(BL)  <- colnames(dat)[-ncol(dat)]
    BEL <- as.data.frame(apply(BEL, 2, scale))  # standardize for plotting
    BL  <- as.data.frame(apply(BL, 2, scale))
    #---------------------------------------------------
    # a distinct name keeps the lasso grid `alpha.vals` intact for the plots below
    alpha.vals2 <- c(0, 0.5, 1, 5, 10, 50, 100, 200, 500, 1000, 2000)
    BEL2 <- matrix(NA, nrow = length(alpha.vals2), ncol = 10)
    BL2  <- matrix(NA, nrow = length(alpha.vals2), ncol = 10)
    for (i in 1:length(alpha.vals2)) {
      x <- as.matrix(dat.train[,1:10])
      y <- dat.train[,11]
      # lars offers no ridge fit, so the ridge path comes from glmnet (alpha = 0)
      fit.ridge <- glmnet(x, y, alpha = 0)
      BEL2[i,] <- as.matrix(coef(fit.ridge, s = alpha.vals2[i]))[-1, 1]  # drop intercept
      x <- as.matrix(dat.test[,1:10])
      y <- dat.test[,11]
      fit.mcmc <- mcmcbayes(x, y, start = thetas.start, n_iters = realdata_iter, burnin = realdata_burn)
      BL2[i,] <- apply(fit.mcmc[,-11], 2, mean)
    }
    colnames(BEL2) <- colnames(dat)[-ncol(dat)]
    colnames(BL2)  <- colnames(dat)[-ncol(dat)]
    BEL2 <- as.data.frame(apply(BEL2, 2, scale))
    BL2  <- as.data.frame(apply(BL2, 2, scale))
    
    g1 <- ggplot(BEL2, aes(x=1:11))+
      geom_line(aes(y=AGE), col=8, lty=8, size=1.1) +
      geom_dl(aes(label = 'age',y=AGE), method = dl.combine("first.points"), col=8) +
      geom_line(aes(y=SEX), col=9, lty=9, size=1.1) +
      geom_dl(aes(label = 'sex',y=SEX), method = dl.combine("first.points"), col=9) +
      geom_line(aes(y=BMI), col=3, lty=3, size=1.1) +
      geom_dl(aes(label = 'bmi',y=BMI), method = dl.combine("first.points"), col=3) +
      geom_line(aes(y=BP), col=5, lty=5, size=1.1) +
      geom_dl(aes(label = 'bp',y=BP), method = dl.combine("first.points"), col=5) +
      geom_line(aes(y=S1), col=10, lty=10, size=1.1) +
      geom_dl(aes(label = 's1',y=S1), method = dl.combine("first.points"), col=10) +
      geom_line(aes(y=S2), col=4, lty=4, size=1.1) +
      geom_dl(aes(label = 's2',y=S2), method = dl.combine("first.points"), col=4) +
      geom_line(aes(y=S3), col=11, lty=11, size=1.1) +
      geom_dl(aes(label = 's3',y=S3), method = dl.combine("first.points"), col=11) +
      geom_line(aes(y=S4), col=6, lty=6, size=1.1) +
      geom_dl(aes(label = 's4',y=S4), method = dl.combine("first.points"), col=6) +
      geom_line(aes(y=S5), col=2, lty=2, size=1.1) +
      geom_dl(aes(label = 's5',y=S5), method = dl.combine("first.points"), col=2) +
      geom_line(aes(y=S6), col=7, lty=7, size=1.1) +
      geom_dl(aes(label = 's6',y=S6), method = dl.combine("first.points"), col=7) +
      theme_classic() + scale_x_continuous(breaks=1:11, labels=alpha.vals2) +
      scale_y_continuous(n.breaks = 10) +
      labs(x=expression(alpha), y='Standardized Coefficient', title = 'Bayesian empirical likelihood ridge trace plot')
    ggsave('fig2left.jpg', plot = g1, width = 10)
    
    g2 <- ggplot(BL2, aes(x=1:11))+
      geom_line(aes(y=AGE), col=8, lty=8, size=1.1) +
      geom_dl(aes(label = 'age',y=AGE), method = dl.combine("first.points"), col=8) +
      geom_line(aes(y=SEX), col=9, lty=9, size=1.1) +
      geom_dl(aes(label = 'sex',y=SEX), method = dl.combine("first.points"), col=9) +
      geom_line(aes(y=BMI), col=3, lty=3, size=1.1) +
      geom_dl(aes(label = 'bmi',y=BMI), method = dl.combine("first.points"), col=3) +
      geom_line(aes(y=BP), col=5, lty=5, size=1.1) +
      geom_dl(aes(label = 'bp',y=BP), method = dl.combine("first.points"), col=5) +
      geom_line(aes(y=S1), col=10, lty=10, size=1.1) +
      geom_dl(aes(label = 's1',y=S1), method = dl.combine("first.points"), col=10) +
      geom_line(aes(y=S2), col=4, lty=4, size=1.1) +
      geom_dl(aes(label = 's2',y=S2), method = dl.combine("first.points"), col=4) +
      geom_line(aes(y=S3), col=11, lty=11, size=1.1) +
      geom_dl(aes(label = 's3',y=S3), method = dl.combine("first.points"), col=11) +
      geom_line(aes(y=S4), col=6, lty=6, size=1.1) +
      geom_dl(aes(label = 's4',y=S4), method = dl.combine("first.points"), col=6) +
      geom_line(aes(y=S5), col=2, lty=2, size=1.1) +
      geom_dl(aes(label = 's5',y=S5), method = dl.combine("first.points"), col=2) +
      geom_line(aes(y=S6), col=7, lty=7, size=1.1) +
      geom_dl(aes(label = 's6',y=S6), method = dl.combine("first.points"), col=7) +
      theme_classic() + scale_x_continuous(breaks=1:11, labels=alpha.vals2) +
      scale_y_continuous(n.breaks = 10) +
      labs(x=expression(alpha), y='Standardized Coefficient', title = 'Bayesian ridge trace plot')
    ggsave('fig2right.jpg', plot = g2, width = 10)
    
    g3 <- ggplot(BEL, aes(x=1:10))+
      geom_line(aes(y=AGE), col=8, lty=8, size=1.1) +
      geom_dl(aes(label = 'age',y=AGE), method = dl.combine("first.points"), col=8) +
      geom_line(aes(y=SEX), col=9, lty=9, size=1.1) +
      geom_dl(aes(label = 'sex',y=SEX), method = dl.combine("first.points"), col=9) +
      geom_line(aes(y=BMI), col=3, lty=3, size=1.1) +
      geom_dl(aes(label = 'bmi',y=BMI), method = dl.combine("first.points"), col=3) +
      geom_line(aes(y=BP), col=5, lty=5, size=1.1) +
      geom_dl(aes(label = 'bp',y=BP), method = dl.combine("first.points"), col=5) +
      geom_line(aes(y=S1), col=10, lty=10, size=1.1) +
      geom_dl(aes(label = 's1',y=S1), method = dl.combine("first.points"), col=10) +
      geom_line(aes(y=S2), col=4, lty=4, size=1.1) +
      geom_dl(aes(label = 's2',y=S2), method = dl.combine("first.points"), col=4) +
      geom_line(aes(y=S3), col=11, lty=11, size=1.1) +
      geom_dl(aes(label = 's3',y=S3), method = dl.combine("first.points"), col=11) +
      geom_line(aes(y=S4), col=6, lty=6, size=1.1) +
      geom_dl(aes(label = 's4',y=S4), method = dl.combine("first.points"), col=6) +
      geom_line(aes(y=S5), col=2, lty=2, size=1.1) +
      geom_dl(aes(label = 's5',y=S5), method = dl.combine("first.points"), col=2) +
      geom_line(aes(y=S6), col=7, lty=7, size=1.1) +
      geom_dl(aes(label = 's6',y=S6), method = dl.combine("first.points"), col=7) +
      theme_classic() + scale_x_continuous(breaks=1:10, labels=alpha.vals) +
      scale_y_continuous(n.breaks = 10) +
      labs(x=expression(alpha), y='Standardized Coefficient', title = 'Bayesian empirical likelihood lasso trace plot')
    ggsave('fig3left.jpg', plot = g3, width = 10)
    
    g4 <- ggplot(BL, aes(x=1:10))+
      geom_line(aes(y=AGE), col=8, lty=8, size=1.1) +
      geom_dl(aes(label = 'age',y=AGE), method = dl.combine("first.points"), col=8) +
      geom_line(aes(y=SEX), col=9, lty=9, size=1.1) +
      geom_dl(aes(label = 'sex',y=SEX), method = dl.combine("first.points"), col=9) +
      geom_line(aes(y=BMI), col=3, lty=3, size=1.1) +
      geom_dl(aes(label = 'bmi',y=BMI), method = dl.combine("first.points"), col=3) +
      geom_line(aes(y=BP), col=5, lty=5, size=1.1) +
      geom_dl(aes(label = 'bp',y=BP), method = dl.combine("first.points"), col=5) +
      geom_line(aes(y=S1), col=10, lty=10, size=1.1) +
      geom_dl(aes(label = 's1',y=S1), method = dl.combine("first.points"), col=10) +
      geom_line(aes(y=S2), col=4, lty=4, size=1.1) +
      geom_dl(aes(label = 's2',y=S2), method = dl.combine("first.points"), col=4) +
      geom_line(aes(y=S3), col=11, lty=11, size=1.1) +
      geom_dl(aes(label = 's3',y=S3), method = dl.combine("first.points"), col=11) +
      geom_line(aes(y=S4), col=6, lty=6, size=1.1) +
      geom_dl(aes(label = 's4',y=S4), method = dl.combine("first.points"), col=6) +
      geom_line(aes(y=S5), col=2, lty=2, size=1.1) +
      geom_dl(aes(label = 's5',y=S5), method = dl.combine("first.points"), col=2) +
      geom_line(aes(y=S6), col=7, lty=7, size=1.1) +
      geom_dl(aes(label = 's6',y=S6), method = dl.combine("first.points"), col=7) +
      theme_classic() + scale_x_continuous(breaks=1:10, labels=alpha.vals) +
      scale_y_continuous(n.breaks = 10) +
      labs(x=expression(alpha), y='Standardized Coefficient', title = 'Bayesian lasso trace plot')
    ggsave('fig3right.jpg', plot = g4, width = 10)
    #####################################################
    mdata <- as.data.frame(fit.mcmc[,-11])
    colnames(mdata) <- colnames(dat)[-ncol(dat)]
    # one trace plot and one histogram/density per posterior coefficient;
    # the chain length is read from the data rather than hard-coded
    for (k in seq_along(colnames(mdata))) {
      v <- colnames(mdata)[k]
      p_trace <- ggplot(mdata, aes(x = seq_len(nrow(mdata)), y = .data[[v]])) +
        geom_line() +
        labs(x = 'Index', y = bquote(theta[.(k)]), title = paste('Trace plot of', v))
      p_hist <- ggplot(mdata, aes(x = .data[[v]])) +
        geom_histogram(aes(y = after_stat(density)), colour = 1, fill = "white", bins = 30, alpha = .25) +
        geom_density() +
        labs(x = '', y = 'Density', title = paste('Histogram of', v))
      # patchwork puts the two panels side by side; the plot is passed to
      # ggsave() explicitly so the right figure is written inside a function
      ggsave(paste0('fig4row', k, '.jpg'), plot = p_trace + p_hist, width = 25)
    }
    list(table1 = table1, table2 = table2, table3 = table3, table4 = table4,
         table5_hpd = table5_hpd, table5_credible = table5_cri)
  }
  return(res())
}
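
As a quick sanity check on the simulation design, ordinary least squares on one data set drawn from the same model should recover the well-identified coefficients (those of x1 and x4) closely, while x2 and x3 are weakly identified because those predictors have very little spread relative to the noise. A minimal sketch, re-creating the design outside Final() (the variable names here are illustrative):

set.seed(1)
n  <- 500
x1 <- rnorm(n, 2, 0.5); x2 <- runif(n, -0.1, 0.1)
x3 <- rgamma(n, 2, 198); x4 <- rnorm(n, 5, 1)
y  <- 3*x1 + x2 + 0.2*x3 - 0.5*x4 + rnorm(n, 0.1, 2)
round(coef(lm(y ~ x1 + x2 + x3 + x4)), 2)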

Outputs

final_tables <- Final(simulation_iter=10000, simulation_burn=2000, realdata_iter=5000, realdata_burn=1000)
final_tables$table1
final_tables$table2
final_tables$table3
final_tables$table4
final_tables$table5_hpd
final_tables$table5_credible
## $table1
##         n shrinkage        BEL      Bayes       Freq        BEL      Bayes
## theta  NA        NA 3.00000000 3.00000000 3.00000000 1.00000000 1.00000000
## Lasso  20       0.0 0.21549602 0.21849554 0.21458354 0.24217422 0.24413997
## Lasso  20       0.5 0.21725190 0.21801159 0.52826653 0.22930445 0.23932899
## Lasso  20       1.0 0.26391502 0.21438765 1.30878097 0.22318360 0.24126077
## Lasso  20       1.5 0.51803459 0.21499612 2.54770931 0.23231527 0.23854086
## Lasso  20       2.0 1.26989210 0.21760729 4.26846627 0.35806713 0.22903129
## Lasso  20       3.0 4.28906434 0.21709829 7.98829680 0.64566104 0.22023897
## Ridge  20       0.0 0.88361209 0.24185246 0.21530044 0.20397835 0.23408930
## Ridge  20       0.5 0.99678412 0.22556838 0.35148178 0.21576999 0.21990702
## Ridge  20       1.0 1.12919309 0.21212202 0.66270025 0.22470815 0.20798128
## Ridge  20       1.5 1.25875386 0.19762904 1.01920655 0.23487767 0.19904571
## Ridge  20       2.0 1.40121345 0.19167116 1.37827991 0.24333907 0.19720966
## Ridge  20       3.0 1.64004752 0.17789053 2.05723228 0.27074823 0.19101283
## Lasso  40       0.0 0.11954838 0.12270119 0.12253917 0.08760733 0.08656137
## Lasso  40       0.5 0.11377307 0.12319941 0.36038301 0.08402561 0.08516228
## Lasso  40       1.0 0.12218487 0.12227645 1.10600684 0.08306465 0.08774820
## Lasso  40       1.5 0.17035336 0.12178381 2.32645002 0.10530555 0.08596796
## Lasso  40       2.0 0.34609925 0.12185597 4.06615905 0.16750428 0.08418898
## Lasso  40       3.0 2.08462280 0.12369106 8.17867378 0.40377367 0.08291738
## Ridge  40       0.0 0.42804380 0.12231214 0.09479463 0.09480525 0.08629162
## Ridge  40       0.5 0.48665976 0.11529716 0.06857669 0.09833067 0.08221176
## Ridge  40       1.0 0.55130998 0.11368015 0.06060480 0.10274208 0.07812885
## Ridge  40       1.5 0.62343984 0.11671121 0.05190823 0.10910315 0.07697543
## Ridge  40       2.0 0.70463912 0.12184543 0.04614381 0.11465130 0.07143824
## Ridge  40       3.0 0.84038301 0.13660806 0.04223701 0.13477865 0.06776936
## Lasso 100       0.0 0.03938030 0.04980309 0.04742276 0.04970077 0.05375467
## Lasso 100       0.5 0.03674018 0.04726211 0.03971577 0.05309978 0.06202964
## Lasso 100       1.0 0.04134736 0.04831981 0.04272433 0.05155445 0.05963388
## Lasso 100       1.5 0.04814918 0.05101725 0.04008347 0.06580769 0.07015927
## Lasso 100       2.0 0.06980654 0.04779948 0.04077756 0.08375977 0.11904456
## Lasso 100       3.0 0.16668348 0.05072506 0.04123593 0.14904431 0.15383561
## Ridge 100       0.0 0.14664853 0.03905492 0.03957428 0.04903510 0.05067488
## Ridge 100       0.5 0.16742697 0.03932465 0.15606968 0.05349938 0.06335305
## Ridge 100       1.0 0.18990773 0.03802631 0.43306645 0.07059786 0.07194502
## Ridge 100       1.5 0.21108076 0.03897260 0.77219918 0.07225581 0.07460703
## Ridge 100       2.0 0.23795612 0.04094515 1.12836101 0.07194662 0.07115047
## Ridge 100       3.0 0.29094311 0.04047735 1.81457973 0.07962392 0.07333603
##             Freq        BEL      Bayes       Freq         BEL       Bayes
## theta 1.00000000 0.20000000 0.20000000 0.20000000 -0.50000000 -0.50000000
## Lasso 0.25400381 0.25618427 0.23143643 0.24014865  0.26690279  0.27153237
## Lasso 0.42953495 0.22327750 0.23249639 0.25257158  0.26640003  0.27284022
## Lasso 0.72748167 0.15646567 0.23394810 0.24153558  0.19750969  0.27337763
## Lasso 0.93575175 0.11727024 0.23329387 0.24070100  0.16973991  0.27164485
## Lasso 0.99363387 0.09150971 0.23254453 0.24043089  0.17734734  0.26865162
## Lasso 1.00035032 0.07213762 0.22905675 0.24018906  0.22673489  0.25947395
## Ridge 0.23426282 0.17402233 0.21475187 0.24148454  0.19877165  0.27501062
## Ridge 0.22049553 0.17806344 0.20657864 0.18210877  0.19923907  0.25772698
## Ridge 0.20874242 0.17864739 0.21779984 0.15509991  0.20767069  0.24498140
## Ridge 0.23042075 0.17486811 0.23873331 0.14034185  0.20852213  0.23391143
## Ridge 0.25391788 0.17715051 0.27095621 0.12537736  0.20861720  0.22189696
## Ridge 0.30671632 0.17056031 0.36214518 0.10940965  0.21445887  0.21021549
## Lasso 0.08395220 0.09355796 0.09757007 0.09435790  0.11379011  0.12094578
## Lasso 0.31234826 0.08429607 0.09624466 0.04379499  0.11427613  0.11919747
## Lasso 0.78680647 0.06640053 0.09443509 0.03856342  0.09825771  0.11768074
## Lasso 0.97023530 0.03964547 0.09329298 0.04088410  0.09195964  0.12055993
## Lasso 0.99868568 0.03968943 0.09467903 0.03803889  0.09106856  0.11559211
## Lasso 1.00129425 0.03694731 0.09222951 0.04184521  0.13970560  0.11864812
## Ridge 0.08602118 0.06992940 0.09742933 0.12413538  0.08993586  0.11933393
## Ridge 0.06861501 0.06726417 0.09101389 0.21863167  0.08442179  0.11594472
## Ridge 0.08829472 0.06991197 0.08953609 0.48507113  0.08515139  0.10805851
## Ridge 0.11608807 0.06778316 0.08674686 0.81491438  0.08893111  0.10100686
## Ridge 0.15032328 0.06435426 0.08586330 1.16943161  0.09004457  0.09931228
## Ridge 0.22078328 0.06516652 0.07807302 1.84677802  0.09101473  0.09074748
## Lasso 0.05059923 0.04821813 0.03716885 0.03859541  0.04850687  0.04913941
## Lasso 0.32589589 0.04703604 0.03759939 0.29768338  0.05131068  0.04910329
## Lasso 0.85228328 0.04163581 0.03682764 1.05588994  0.04931997  0.04943243
## Lasso 0.98827142 0.03576003 0.04094215 2.29241760  0.05399363  0.05095940
## Lasso 0.99958728 0.03383365 0.03732830 4.04480682  0.05525483  0.05325913
## Lasso 1.00270137 0.02916191 0.03980371 8.45587698  0.07268673  0.05155540
## Ridge 0.05180112 0.04118067 0.05192418 0.04827503  0.04547647  0.05196324
## Ridge 0.06108719 0.04481608 0.04786858 0.04171977  0.04177966  0.04953027
## Ridge 0.09255386 0.04368722 0.04733827 0.03693028  0.04601248  0.05190759
## Ridge 0.12938580 0.04371167 0.05055204 0.03363667  0.04372450  0.05063612
## Ridge 0.16869753 0.04497723 0.04816325 0.03046179  0.04662965  0.04724337
## Ridge 0.23863559 0.04279121 0.04636648 0.02556555  0.04827849  0.04991558
##              Freq
## theta -0.50000000
## Lasso  0.27406497
## Lasso  0.28043691
## Lasso  0.21752678
## Lasso  0.24699089
## Lasso  0.24830037
## Lasso  0.25069308
## Ridge  0.27193748
## Ridge  0.21775587
## Ridge  0.19308574
## Ridge  0.18268226
## Ridge  0.17678752
## Ridge  0.17789551
## Lasso  0.11934984
## Lasso  0.17512642
## Lasso  0.24273598
## Lasso  0.24896973
## Lasso  0.25282837
## Lasso  0.25129319
## Ridge  0.11909665
## Ridge  0.08152418
## Ridge  0.07312782
## Ridge  0.07102335
## Ridge  0.07409367
## Ridge  0.08453076
## Lasso  0.05333000
## Lasso  0.19637613
## Lasso  0.25018310
## Lasso  0.25190120
## Lasso  0.25178920
## Lasso  0.24965330
## Ridge  0.04911149
## Ridge  0.04592497
## Ridge  0.05283840
## Ridge  0.05628775
## Ridge  0.06446502
## Ridge  0.07910494
## 
## $table2
##         n shrinkage          BEL         Bayes         Freq         BEL
## theta  NA        NA  3.000000000  3.0000000000  3.000000000  1.00000000
## Lasso  20       0.0  0.018584379  0.0092319195  0.010338663  0.02537438
## Lasso  20       0.5 -0.031940340  0.0100130718 -0.540404766 -0.01128714
## Lasso  20       1.0 -0.192854335  0.0069797787 -1.021205212 -0.13207676
## Lasso  20       1.5 -0.469546899  0.0045574991 -1.505845191 -0.29112836
## Lasso  20       2.0 -0.889429548 -0.0018468921 -2.004999721 -0.46565600
## Lasso  20       3.0 -1.890231960 -0.0213004617 -2.810525153 -0.75890773
## Ridge  20       0.0 -0.890570118  0.0122250827  0.009266168 -0.26629957
## Ridge  20       0.5 -0.955901467 -0.0817520585 -0.401490468 -0.29008014
## Ridge  20       1.0 -1.018449993 -0.1664162843 -0.697741574 -0.30817840
## Ridge  20       1.5 -1.079867073 -0.2461913654 -0.924621339 -0.33345640
## Ridge  20       2.0 -1.147406462 -0.3192124232 -1.109470290 -0.34864116
## Ridge  20       3.0 -1.247521548 -0.4560393789 -1.392583358 -0.38458809
## Lasso  40       0.0  0.048200175  0.0455234268  0.043404922  0.05355936
## Lasso  40       0.5  0.018400675  0.0441469976 -0.486284366  0.03185195
## Lasso  40       1.0 -0.071999637  0.0432256472 -0.985937361 -0.04761976
## Lasso  40       1.5 -0.207887077  0.0424700823 -1.477341764 -0.15599174
## Lasso  40       2.0 -0.415658338  0.0396935035 -1.981594307 -0.27500543
## Lasso  40       3.0 -1.197992545  0.0362856986 -2.851474128 -0.56211299
## Ridge  40       0.0 -0.607224587  0.0415166836  0.044142140 -0.15864544
## Ridge  40       0.5 -0.654492450  0.0023496293 -0.335310429 -0.17912692
## Ridge  40       1.0 -0.699267233 -0.0383844675 -0.628387213 -0.19171687
## Ridge  40       1.5 -0.748900210 -0.0778502190 -0.859843458 -0.21051006
## Ridge  40       2.0 -0.800346778 -0.1149454747 -1.047512008 -0.22447496
## Ridge  40       3.0 -0.879003830 -0.1902099480 -1.334493228 -0.25103296
## Lasso 100       0.0  0.013412649  0.0126764233  0.014015260 -0.01354242
## Lasso 100       0.5  0.002944393  0.0128832253 -0.499505916 -0.02520402
## Lasso 100       1.0 -0.029726001  0.0146398151 -0.998012759 -0.05885761
## Lasso 100       1.5 -0.087072324  0.0127379459 -1.492253194 -0.11104067
## Lasso 100       2.0 -0.152188624  0.0117125428 -1.995316766 -0.17861274
## Lasso 100       3.0 -0.322733015  0.0149910017 -2.903314209 -0.31104757
## Ridge 100       0.0 -0.340698838  0.0161371670  0.014937251 -0.13503651
## Ridge 100       0.5 -0.367450866  0.0004376095 -0.351133886 -0.14405311
## Ridge 100       1.0 -0.395099781 -0.0179878016 -0.635731440 -0.15561195
## Ridge 100       1.5 -0.426011355 -0.0341839974 -0.860640999 -0.16386976
## Ridge 100       2.0 -0.455137886 -0.0464402327 -1.047233301 -0.17156904
## Ridge 100       3.0 -0.505530550 -0.0753135457 -1.334637349 -0.18916768
##               Bayes        Freq          BEL       Bayes         Freq
## theta  1.0000000000  1.00000000  0.200000000 0.200000000  0.200000000
## Lasso  0.0157560873  0.01380139  0.036985235 0.038731883  0.038802357
## Lasso  0.0134456051 -0.50627631  0.026475916 0.037335175 -0.148719162
## Lasso  0.0119311809 -0.80884898 -0.015546304 0.037595688 -0.197234913
## Lasso  0.0047508160 -0.96034552 -0.052108616 0.035266989 -0.198831533
## Lasso -0.0009903771 -0.99483956 -0.076693095 0.032118109 -0.197560997
## Lasso -0.0145466566 -0.99904733 -0.145246189 0.028266364 -0.198504437
## Ridge  0.0141989214  0.01112754 -0.037629886 0.037396904  0.041587342
## Ridge -0.0163754681 -0.12045508 -0.039120779 0.033771096  0.009030736
## Ridge -0.0428075639 -0.21748988 -0.053321070 0.026548699 -0.016365226
## Ridge -0.0696993309 -0.29363931 -0.046397839 0.018653919 -0.034695522
## Ridge -0.0924055277 -0.35020581 -0.056183267 0.014426198 -0.044118623
## Ridge -0.1342869008 -0.44310755 -0.074898627 0.002547905 -0.068050578
## Lasso  0.0573727839  0.05845516  0.043262779 0.037581048  0.039551249
## Lasso  0.0550387867 -0.47252327  0.035366421 0.037016561 -0.174754096
## Lasso  0.0576172128 -0.86380072  0.004721436 0.039799178 -0.195815215
## Lasso  0.0556375957 -0.98413322 -0.034238857 0.034814848 -0.197763774
## Lasso  0.0535319236 -0.99933986 -0.071533376 0.036925499 -0.201797408
## Lasso  0.0490616355 -0.99918686 -0.132048372 0.034131181 -0.199915820
## Ridge  0.0572974604  0.05652191 -0.024880237 0.039117922  0.038659109
## Ridge  0.0442747548 -0.07455761 -0.021483338 0.035133159  0.003125419
## Ridge  0.0299157577 -0.17957658 -0.035718487 0.030548202 -0.019030034
## Ridge  0.0155558105 -0.25975398 -0.028042215 0.028379503 -0.040553536
## Ridge  0.0025253392 -0.32135684 -0.039604394 0.025810260 -0.052821695
## Ridge -0.0222647376 -0.42239425 -0.052747961 0.020375543 -0.077462826
## Lasso -0.0162890390 -0.01242632  0.007936421 0.010541234  0.010723000
## Lasso -0.0145479718 -0.52108371  0.004405296 0.012658121 -0.186332207
## Lasso -0.0146644581 -0.90886616 -0.012129269 0.012358544 -0.198679719
## Lasso -0.0151595968 -0.99347002 -0.028963516 0.010490365 -0.200086037
## Lasso -0.0157591798 -0.99833935 -0.041047427 0.011645818 -0.201275200
## Lasso -0.0146749192 -0.99983286 -0.073150785 0.012040443 -0.200050543
## Ridge -0.0162891304 -0.01687146 -0.005162670 0.010178660  0.012352906
## Ridge -0.0212582853 -0.13418058 -0.008353308 0.011164833 -0.007325052
## Ridge -0.0257090271 -0.23073674 -0.014826209 0.008702505 -0.024569076
## Ridge -0.0282851672 -0.30113921 -0.012746392 0.010641893 -0.039151749
## Ridge -0.0327229883 -0.36444686 -0.017024962 0.009492682 -0.050296036
## Ridge -0.0468055236 -0.46011172 -0.020854691 0.005692600 -0.070123711
##                BEL        Bayes        Freq
## theta -0.500000000 -0.500000000 -0.50000000
## Lasso -0.004999481 -0.017793783 -0.01711239
## Lasso  0.012462234 -0.019788752  0.30944363
## Lasso  0.088499925 -0.015247175  0.45294835
## Lasso  0.187701881 -0.014574987  0.49867283
## Lasso  0.266758927 -0.008360017  0.49942834
## Lasso  0.395303277  0.004404919  0.50002816
## Ridge  0.169439350 -0.021987150 -0.01918614
## Ridge  0.184758067 -0.003951807  0.06488350
## Ridge  0.196148207  0.014235134  0.12020548
## Ridge  0.219255053  0.029641113  0.16384178
## Ridge  0.222237976  0.044855477  0.19612843
## Ridge  0.247971700  0.073812769  0.24983562
## Lasso -0.017250861 -0.023454802 -0.02457809
## Lasso -0.001560420 -0.022208880  0.38260948
## Lasso  0.052635662 -0.021888581  0.49136286
## Lasso  0.120828663 -0.024815325  0.49977892
## Lasso  0.183668836 -0.021530815  0.49910856
## Lasso  0.317287919 -0.018241096  0.49981617
## Ridge  0.089678333 -0.025631608 -0.02237497
## Ridge  0.100072925 -0.015957196  0.03860469
## Ridge  0.104316249 -0.011511532  0.09021734
## Ridge  0.111934018 -0.003127078  0.13104000
## Ridge  0.123679879  0.005686263  0.16030068
## Ridge  0.134851802  0.016557292  0.20805915
## Lasso  0.029835278  0.026746612  0.02705597
## Lasso  0.033432447  0.025557075  0.42335203
## Lasso  0.063838169  0.026669356  0.49959451
## Lasso  0.102431140  0.028453017  0.49981045
## Lasso  0.137608959  0.025914951  0.49804459
## Lasso  0.212227458  0.027890819  0.50145725
## Ridge  0.075922056  0.025622667  0.02827831
## Ridge  0.082422429  0.029293423  0.08726050
## Ridge  0.089492665  0.032989688  0.13335796
## Ridge  0.094557020  0.032291740  0.16652238
## Ridge  0.094584909  0.035150054  0.19676392
## Ridge  0.103085650  0.043521826  0.24254510
## 
## $table3
##                 Gamma   Uniform      Beta
## alpha_lasso 0.2245122 0.2163219 0.2350211
## 
## $table4
##                  AGE         SEX       BMI         BP          S1           S2
## Gamma   -0.010143740 -0.08315139 0.2896730 0.08640426 -0.02783931 -0.004716231
## Uniform -0.012490246 -0.08443993 0.2917320 0.08131125 -0.03089719 -0.001823341
## Beta    -0.008593959 -0.07435286 0.2862787 0.08162973 -0.02789565 -0.009147823
##                 S3         S4        S5         S6
## Gamma   -0.1037362 0.02352876 0.3769826 0.05427840
## Uniform -0.1061869 0.02965148 0.3700568 0.05368272
## Beta    -0.1002580 0.02922915 0.3646441 0.05680657
## 
## $table5_hpd
##    Variables    Gamma_LO   Gamma_UP  Uniform_LO Uniform_UP     Beta_LO
## 1        AGE -0.09145429 0.07164444 -0.08977968 0.06480417 -0.09433692
## 2        SEX -0.17474696 0.01578661 -0.17295890 0.01214711 -0.17711771
## 3        BMI  0.16911170 0.40803312  0.16793211 0.41169787  0.16976531
## 4         BP -0.02326637 0.20497849 -0.02093294 0.19299057 -0.01800878
## 5         S1 -0.16489650 0.08741581 -0.15431102 0.09897042 -0.16047609
## 6         S2 -0.11727751 0.11488721 -0.11309463 0.10188720 -0.12528457
## 7         S3 -0.21892087 0.02339712 -0.22662007 0.01485275 -0.22429737
## 8         S4 -0.08835154 0.16407730 -0.08895238 0.14873981 -0.08101754
## 9         S5  0.24261235 0.50247324  0.23788419 0.49315647  0.23891250
## 10        S6 -0.02527726 0.15837493 -0.02070295 0.14750498 -0.03185047
##       Beta_UP
## 1  0.07552065
## 2  0.01714517
## 3  0.40419798
## 4  0.19544274
## 5  0.09036857
## 6  0.09983685
## 7  0.02473921
## 8  0.15244025
## 9  0.51026290
## 10 0.15650034
## 
## $table5_credible
##    Variables    Gamma_LO    Gamma_UP  Uniform_LO  Uniform_UP     Beta_LO
## 1        AGE -0.08868668 0.059119582 -0.08594428 0.069424331 -0.10183099
## 2        SEX -0.17201710 0.012634972 -0.16965863 0.008850007 -0.17077835
## 3        BMI  0.16725342 0.414556155  0.16634870 0.402883173  0.16563962
## 4         BP -0.02023267 0.204448303 -0.02295955 0.199998688 -0.03534961
## 5         S1 -0.16390576 0.098840720 -0.17015168 0.087206877 -0.15789526
## 6         S2 -0.11660884 0.111164523 -0.11567156 0.095827557 -0.12541745
## 7         S3 -0.22821732 0.008140311 -0.22127311 0.006903957 -0.22497593
## 8         S4 -0.07658124 0.160254456 -0.07842732 0.140943169 -0.08977791
## 9         S5  0.22953807 0.497696633  0.23888937 0.499696125  0.24031706
## 10        S6 -0.02276047 0.155762754 -0.02322814 0.144372968 -0.02686959
##       Beta_UP
## 1  0.05405457
## 2  0.01579797
## 3  0.40163147
## 4  0.19102632
## 5  0.08752512
## 6  0.09036776
## 7  0.02028263
## 8  0.15391241
## 9  0.50096365
## 10 0.15322454

