library(tidyverse)
## -- Attaching packages --------------------------------------------------------- tidyverse 1.2.1 --
## v ggplot2 3.0.0     v purrr   0.2.5
## v tibble  1.4.2     v dplyr   0.7.6
## v tidyr   0.8.1     v stringr 1.3.1
## v readr   1.1.1     v forcats 0.3.0
## Warning: package 'dplyr' was built under R version 3.5.1
## -- Conflicts ------------------------------------------------------------ tidyverse_conflicts() --
## x dplyr::filter() masks stats::filter()
## x dplyr::lag()    masks stats::lag()
library(lme4)
## Warning: package 'lme4' was built under R version 3.5.1
## Loading required package: Matrix
## 
## Attaching package: 'Matrix'
## The following object is masked from 'package:tidyr':
## 
##     expand
library(rstanarm)
## Loading required package: Rcpp
## Warning: package 'Rcpp' was built under R version 3.5.1
## rstanarm (Version 2.17.4, packaged: 2018-04-13 01:51:52 UTC)
## - Do not expect the default priors to remain the same in future rstanarm versions.
## Thus, R scripts should specify priors explicitly, even if they are just the defaults.
## - For execution on a local, multicore CPU with excess RAM we recommend calling
## options(mc.cores = parallel::detectCores())
## - Plotting theme set to bayesplot::theme_default().
library(tidybayes)
## Warning: package 'tidybayes' was built under R version 3.5.1
## NOTE: As of tidybayes version 1.0, several functions, arguments, and output column names
##       have undergone significant name changes in order to adopt a unified naming scheme.
##       See help('tidybayes-deprecated') for more information.
theme_set(theme_grey())
d <- sleepstudy
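
As the rstanarm startup message above suggests, the four chains can be sampled in parallel by setting the mc.cores option. This only affects sampling speed, not the estimates, so it is left commented out and the chains below run one after another.

# Optional, per the rstanarm startup message: sample the chains in parallel
# options(mc.cores = parallel::detectCores())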

m <- stan_glmer(
  Reaction ~ Days + (Days | Subject),
  data = d,
  family = gaussian(),
  prior = normal(0, .5),
  prior_covariance = decov(4, 1, 1, 1)
)
## 
## SAMPLING FOR MODEL 'continuous' NOW (CHAIN 1).
## 
## Gradient evaluation took 0 seconds
## 1000 transitions using 10 leapfrog steps per transition would take 0 seconds.
## Adjust your expectations accordingly!
## 
## 
## Iteration:    1 / 2000 [  0%]  (Warmup)
## Iteration:  200 / 2000 [ 10%]  (Warmup)
## Iteration:  400 / 2000 [ 20%]  (Warmup)
## Iteration:  600 / 2000 [ 30%]  (Warmup)
## Iteration:  800 / 2000 [ 40%]  (Warmup)
## Iteration: 1000 / 2000 [ 50%]  (Warmup)
## Iteration: 1001 / 2000 [ 50%]  (Sampling)
## Iteration: 1200 / 2000 [ 60%]  (Sampling)
## Iteration: 1400 / 2000 [ 70%]  (Sampling)
## Iteration: 1600 / 2000 [ 80%]  (Sampling)
## Iteration: 1800 / 2000 [ 90%]  (Sampling)
## Iteration: 2000 / 2000 [100%]  (Sampling)
## 
##  Elapsed Time: 8.441 seconds (Warm-up)
##                3.296 seconds (Sampling)
##                11.737 seconds (Total)
## 
## 
## SAMPLING FOR MODEL 'continuous' NOW (CHAIN 2).
## 
## Gradient evaluation took 0 seconds
## 1000 transitions using 10 leapfrog steps per transition would take 0 seconds.
## Adjust your expectations accordingly!
## 
## 
## Iteration:    1 / 2000 [  0%]  (Warmup)
## Iteration:  200 / 2000 [ 10%]  (Warmup)
## Iteration:  400 / 2000 [ 20%]  (Warmup)
## Iteration:  600 / 2000 [ 30%]  (Warmup)
## Iteration:  800 / 2000 [ 40%]  (Warmup)
## Iteration: 1000 / 2000 [ 50%]  (Warmup)
## Iteration: 1001 / 2000 [ 50%]  (Sampling)
## Iteration: 1200 / 2000 [ 60%]  (Sampling)
## Iteration: 1400 / 2000 [ 70%]  (Sampling)
## Iteration: 1600 / 2000 [ 80%]  (Sampling)
## Iteration: 1800 / 2000 [ 90%]  (Sampling)
## Iteration: 2000 / 2000 [100%]  (Sampling)
## 
##  Elapsed Time: 7.792 seconds (Warm-up)
##                3.406 seconds (Sampling)
##                11.198 seconds (Total)
## 
## 
## SAMPLING FOR MODEL 'continuous' NOW (CHAIN 3).
## 
## Gradient evaluation took 0 seconds
## 1000 transitions using 10 leapfrog steps per transition would take 0 seconds.
## Adjust your expectations accordingly!
## 
## 
## Iteration:    1 / 2000 [  0%]  (Warmup)
## Iteration:  200 / 2000 [ 10%]  (Warmup)
## Iteration:  400 / 2000 [ 20%]  (Warmup)
## Iteration:  600 / 2000 [ 30%]  (Warmup)
## Iteration:  800 / 2000 [ 40%]  (Warmup)
## Iteration: 1000 / 2000 [ 50%]  (Warmup)
## Iteration: 1001 / 2000 [ 50%]  (Sampling)
## Iteration: 1200 / 2000 [ 60%]  (Sampling)
## Iteration: 1400 / 2000 [ 70%]  (Sampling)
## Iteration: 1600 / 2000 [ 80%]  (Sampling)
## Iteration: 1800 / 2000 [ 90%]  (Sampling)
## Iteration: 2000 / 2000 [100%]  (Sampling)
## 
##  Elapsed Time: 7.636 seconds (Warm-up)
##                3.129 seconds (Sampling)
##                10.765 seconds (Total)
## 
## 
## SAMPLING FOR MODEL 'continuous' NOW (CHAIN 4).
## 
## Gradient evaluation took 0 seconds
## 1000 transitions using 10 leapfrog steps per transition would take 0 seconds.
## Adjust your expectations accordingly!
## 
## 
## Iteration:    1 / 2000 [  0%]  (Warmup)
## Iteration:  200 / 2000 [ 10%]  (Warmup)
## Iteration:  400 / 2000 [ 20%]  (Warmup)
## Iteration:  600 / 2000 [ 30%]  (Warmup)
## Iteration:  800 / 2000 [ 40%]  (Warmup)
## Iteration: 1000 / 2000 [ 50%]  (Warmup)
## Iteration: 1001 / 2000 [ 50%]  (Sampling)
## Iteration: 1200 / 2000 [ 60%]  (Sampling)
## Iteration: 1400 / 2000 [ 70%]  (Sampling)
## Iteration: 1600 / 2000 [ 80%]  (Sampling)
## Iteration: 1800 / 2000 [ 90%]  (Sampling)
## Iteration: 2000 / 2000 [100%]  (Sampling)
## 
##  Elapsed Time: 7.314 seconds (Warm-up)
##                4.88 seconds (Sampling)
##                12.194 seconds (Total)
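
Two quick checks before generating predictions (their printed output is not shown here): prior_summary() reports the priors as rstanarm actually applied them, including any rescaling, and fixef() returns the population-level point estimates for the intercept and the Days slope, which the red reference line at the end is drawn from.

# Priors as applied by rstanarm: normal(0, .5) on the Days coefficient and the
# decov() prior on the random-effect covariance
prior_summary(m)

# Population-level (fixed effects) point estimates, reused for the red line below
fixef(m)

The next two blocks contrast re_formula = NA, which keeps only the population-level part of the model, with re_formula = NULL, which also includes the group-level terms. Because Subject = "NEW" is a level the model has never seen, the second route simulates plausible random effects for a brand-new participant.
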
d_fixed <- d %>%
  modelr::data_grid(Days) %>%
  # New participant, but re_formula = NA ignores the random effects entirely
  mutate(Subject = "NEW", Prediction = "Fixed: re_formula = NA") %>%
  add_fitted_draws(m, n = 100, re_formula = NA)

d_full <- d %>%
  modelr::data_grid(Days) %>%
  # New participant whose random effects will be simulated (re_formula = NULL)
  mutate(Subject = "NEW", Prediction = "Full: re_formula = NULL") %>%
  add_fitted_draws(m, n = 100, re_formula = NULL)

bind_rows(d_fixed, d_full) %>%
  ggplot() +
    aes(x = Days, y = .value) +
    facet_wrap("Prediction") +
    geom_line(
      aes(group = interaction(Subject, .draw)),
      alpha = .1,
      color = "blue")
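
The Full panel should show a wider spread of lines, because each draw also carries a freshly simulated Subject effect. For reference, the observed sleepstudy times can be overlaid on the same spaghetti plot; a quick sketch (the raw points repeat in both panels because d has no Prediction column):

# Overlay the observed Reaction times behind the posterior lines
bind_rows(d_fixed, d_full) %>%
  ggplot() +
    aes(x = Days, y = .value) +
    facet_wrap("Prediction") +
    geom_point(aes(y = Reaction), data = d, alpha = .2) +
    geom_line(
      aes(group = interaction(Subject, .draw)),
      alpha = .1,
      color = "blue")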

# Use all of the posterior draws (n = 4000) for the interval summaries
d_fixed <- d %>%
  modelr::data_grid(Days) %>%
  # New participant, but re_formula = NA ignores the random effects entirely
  mutate(Subject = "NEW", Prediction = "Fixed: re_formula = NA") %>%
  add_fitted_draws(m, n = 4000, re_formula = NA)

d_full <- d %>%
  modelr::data_grid(Days) %>%
  # New participant whose random effects will be simulated (re_formula = NULL)
  mutate(Subject = "NEW", Prediction = "Full: re_formula = NULL") %>%
  add_fitted_draws(m, n = 4000, re_formula = NULL)

bind_rows(d_fixed, d_full) %>%
  ggplot() +
    aes(x = Days, y = .value) +
    facet_wrap("Prediction") +
    stat_summary(fun.data = median_hdci) +
    # population-level (fixed effects) line
    geom_abline(
        slope = fixef(m)[2],
        intercept = fixef(m)[1],
        color = "red")
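
The intervals above summarize the fitted values, that is, the expected Reaction time for a new participant, rather than individual observations. For intervals that also cover new observations, tidybayes offers add_predicted_draws(), which draws from the posterior predictive distribution and returns a .prediction column (tidybayes 1.0 naming); a minimal sketch:

# Sketch: posterior predictive draws also include the observation-level noise,
# so these intervals are wider than the fitted-value intervals above
d %>%
  modelr::data_grid(Days) %>%
  mutate(Subject = "NEW") %>%
  add_predicted_draws(m, re_formula = NULL) %>%
  ggplot(aes(x = Days, y = .prediction)) +
    stat_summary(fun.data = median_hdci)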