Make up some data

library(glmnet)  
## Loading required package: Matrix
## Loaded glmnet 4.1-8
set.seed(42)   # set a seed so the simulated data are reproducible

n <- 1000      # number of samples
p <- 5000      # number of predictor variables
real_p <- 15   # only the first 15 predictors actually affect the outcome

# Simulate the data: x is an n x p matrix of standard normal predictors,
# and y is the sum of the first real_p columns of x plus random noise.
x <- matrix(rnorm(n*p), nrow=n, ncol=p)
y <- apply(x[,1:real_p], 1, sum) + rnorm(n)


# Split the data: 66% of the rows for training, the rest held out for testing.
train_rows <- sample(1:n, .66*n)
x.train <- x[train_rows, ]
x.test <- x[-train_rows, ]

y.train <- y[train_rows]
y.test <- y[-train_rows]




# Ridge regression: alpha = 0. cv.glmnet() uses 10-fold cross-validation
# (by default) to choose the penalty strength lambda.
alpha0.fit <- cv.glmnet(x.train, y.train, type.measure="mse", 
                        alpha=0, family="gaussian")

# Predict on the held-out test data using lambda.1se, then compute the
# test-set mean squared error.
alpha0.predicted <- 
  predict(alpha0.fit, s=alpha0.fit$lambda.1se, newx=x.test)

mean((y.test - alpha0.predicted)^2)
## [1] 14.88459
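The prediction above uses lambda.1se, the largest penalty whose cross-validated error is within one standard error of the minimum. cv.glmnet() also stores lambda.min, the penalty with the lowest cross-validated error; as a quick sensitivity check (not part of the original analysis), you could swap it in:

# Sketch: re-do the Ridge prediction at lambda.min and compare its
# test-set MSE with the lambda.1se result above.
alpha0.predicted.min <- predict(alpha0.fit, s="lambda.min", newx=x.test)
mean((y.test - alpha0.predicted.min)^2)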
# Lasso regression: alpha = 1.
alpha1.fit <- cv.glmnet(x.train, y.train, type.measure="mse", 
                        alpha=1, family="gaussian")

alpha1.predicted <- 
  predict(alpha1.fit, s=alpha1.fit$lambda.1se, newx=x.test)

mean((y.test - alpha1.predicted)^2)
## [1] 1.184701
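Since only 15 of the 5,000 predictors were used to generate y, it makes sense that Lasso beats Ridge here. One hedged way to see why, assuming the fits above are still in memory, is to count the non-zero coefficients each model keeps at lambda.1se: Ridge shrinks but retains every coefficient, while Lasso should keep something close to the 15 informative predictors (the exact count depends on the cross-validation folds).

# How many coefficients (excluding the intercept) are non-zero in each model?
ridge_coefs <- as.vector(coef(alpha0.fit, s="lambda.1se"))[-1]
lasso_coefs <- as.vector(coef(alpha1.fit, s="lambda.1se"))[-1]
sum(ridge_coefs != 0)   # Ridge: typically all 5000 (shrunken, not exactly zero)
sum(lasso_coefs != 0)   # Lasso: typically a small number, near real_p = 15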
# Elastic-net regression: alpha = 0.5, an even mix of the Ridge and Lasso penalties.
alpha0.5.fit <- cv.glmnet(x.train, y.train, type.measure="mse", 
                          alpha=0.5, family="gaussian")

alpha0.5.predicted <- 
  predict(alpha0.5.fit, s=alpha0.5.fit$lambda.1se, newx=x.test)

mean((y.test - alpha0.5.predicted)^2)
## [1] 1.23797
# Try every value of alpha from 0 to 1 in steps of 0.1, storing each
# cross-validated fit in a list under the names "alpha0", "alpha0.1", ..., "alpha1".
list.of.fits <- list()
for (i in 0:10) {
  fit.name <- paste0("alpha", i/10)
  
  list.of.fits[[fit.name]] <-
    cv.glmnet(x.train, y.train, type.measure="mse", alpha=i/10, 
              family="gaussian")
}
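Fitting eleven cross-validated models on 5,000 predictors takes a while. As an optional aside (not in the original code), cv.glmnet() can run its cross-validation folds in parallel once a parallel backend is registered, for example via the doParallel package:

# Optional sketch: register a parallel backend and pass parallel=TRUE.
# Assumes the doParallel package is installed; adjust cores to your machine.
library(doParallel)
registerDoParallel(cores = 4)
fit.parallel <- cv.glmnet(x.train, y.train, type.measure="mse",
                          alpha=0.5, family="gaussian", parallel=TRUE)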


# For each stored fit, predict on the test set at lambda.1se, compute the
# mean squared error, and collect everything in a data frame.
results <- data.frame()
for (i in 0:10) {
  fit.name <- paste0("alpha", i/10)
  
  predicted <- 
    predict(list.of.fits[[fit.name]], 
            s=list.of.fits[[fit.name]]$lambda.1se, newx=x.test)
  
  mse <- mean((y.test - predicted)^2)
  
  temp <- data.frame(alpha=i/10, mse=mse, fit.name=fit.name)
  results <- rbind(results, temp)
}


# Test-set MSE for every alpha, from 0 (Ridge) to 1 (Lasso).
results
##    alpha       mse fit.name
## 1    0.0 14.918840   alpha0
## 2    0.1  2.256924 alpha0.1
## 3    0.2  1.472927 alpha0.2
## 4    0.3  1.362394 alpha0.3
## 5    0.4  1.259794 alpha0.4
## 6    0.5  1.252103 alpha0.5
## 7    0.6  1.253330 alpha0.6
## 8    0.7  1.212927 alpha0.7
## 9    0.8  1.184028 alpha0.8
## 10   0.9  1.182919 alpha0.9
## 11   1.0  1.184701   alpha1
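In this run the lowest test-set MSE is at alpha = 0.9, though everything from roughly alpha = 0.6 to 1 is close, and the exact winner can shift with a different seed or different cross-validation folds. To pull the best row out of the table programmatically:

# Row of the results table with the smallest test-set MSE.
results[which.min(results$mse), ]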
set.seed(42)     # reset the seed for a second simulation

n <- 1000        # same number of samples
p <- 5000        # same number of predictor variables
real_p <- 1500   # but now 1,500 of the predictors actually affect the outcome

# Simulate the new data and split it into training and testing sets
# exactly as before.
x <- matrix(rnorm(n*p), nrow=n, ncol=p)
y <- apply(x[,1:real_p], 1, sum) + rnorm(n)

train_rows <- sample(1:n, .66*n)
x.train <- x[train_rows, ]
x.test <- x[-train_rows, ]

y.train <- y[train_rows]
y.test <- y[-train_rows]

# Refit the whole series of elastic-net models on the new data.
list.of.fits <- list()
for (i in 0:10) {
  fit.name <- paste0("alpha", i/10)
  
  list.of.fits[[fit.name]] <-
    cv.glmnet(x.train, y.train, type.measure="mse", alpha=i/10, 
              family="gaussian")
}

# Recompute the test-set MSE for each value of alpha.
results <- data.frame()
for (i in 0:10) {
  fit.name <- paste0("alpha", i/10)
  
  predicted <- 
    predict(list.of.fits[[fit.name]], 
            s=list.of.fits[[fit.name]]$lambda.1se, newx=x.test)
  
  mse <- mean((y.test - predicted)^2)
  
  temp <- data.frame(alpha=i/10, mse=mse, fit.name=fit.name)
  results <- rbind(results, temp)
}

# Test-set MSE for every alpha on the new data.
results
##    alpha      mse fit.name
## 1    0.0 1400.375   alpha0
## 2    0.1 1545.035 alpha0.1
## 3    0.2 1545.035 alpha0.2
## 4    0.3 1545.035 alpha0.3
## 5    0.4 1545.035 alpha0.4
## 6    0.5 1545.035 alpha0.5
## 7    0.6 1545.035 alpha0.6
## 8    0.7 1545.035 alpha0.7
## 9    0.8 1545.035 alpha0.8
## 10   0.9 1545.035 alpha0.9
## 11   1.0 1545.035   alpha1
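This time Ridge (alpha = 0) has the lowest test-set MSE: with 1,500 small-but-real effects, keeping every predictor with a shrunken coefficient works better than Lasso-style selection, which can retain at most as many predictors as there are training samples (about 660 here). Every alpha > 0 gives exactly the same MSE, which suggests that at lambda.1se those penalties have shrunk essentially every coefficient to zero, leaving an intercept-only model. A quick (hedged) way to check, using the fits already stored in list.of.fits:

# How many non-zero coefficients (excluding the intercept) does the
# lambda.1se Lasso fit keep in this second simulation?
sum(as.vector(coef(list.of.fits[["alpha1"]], s="lambda.1se"))[-1] != 0)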
