library(brms)
library(rstan)

1. Data Simulation

set.seed(1312)
n  = 250  # Number of observations
x1 = rnorm(n, mean = 3,  sd = 1)
x2 = rnorm(n, mean = 5,  sd = 2)
y  = 1.0 + 3.5 * x1 - 1.5 * x2 + rnorm(n, sd = 1)  # Multiple linear regression model
data = data.frame(y, x1, x2)
head(data)  # Preview the first rows
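
As a quick sanity check (not part of the original script), an ordinary least-squares fit should recover coefficients close to the true values of 1.0, 3.5, and -1.5 used in the simulation:

## Frequentist check of the simulated coefficients (illustrative only)
coef(lm(y ~ x1 + x2, data = data))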

2. Bayesian Regression Model with brms

## Define the priors
prior_kita = c(
      set_prior("normal(0,10)", class = "b", coef = "x1"),  # Prior for b1
      set_prior("normal(0,15)", class = "b", coef = "x2"),  # Prior for b2
      set_prior("normal(0,15)", class = "Intercept"))       # Prior for b0 (intercept)
         
## Fit the Bayesian regression model
regresi_bayes = brm(
  formula = y ~ x1 + x2,
  data = data,
  family = gaussian(),     # Response distribution (Gaussian for linear regression)
  prior = prior_kita,      # Priors as defined above
  chains = 3,              # Number of MCMC chains
  iter = 2000,             # Iterations per chain (including warmup)
  warmup = 500,            # Warmup iterations per chain
  seed = 1312
)
## Compiling Stan program...
## Start sampling
## 
## SAMPLING FOR MODEL 'anon_model' NOW (CHAIN 1).
## Chain 1: 
## Chain 1: Gradient evaluation took 0.000104 seconds
## Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 1.04 seconds.
## Chain 1: Adjust your expectations accordingly!
## Chain 1: 
## Chain 1: 
## Chain 1: Iteration:    1 / 2000 [  0%]  (Warmup)
## Chain 1: Iteration:  200 / 2000 [ 10%]  (Warmup)
## Chain 1: Iteration:  400 / 2000 [ 20%]  (Warmup)
## Chain 1: Iteration:  501 / 2000 [ 25%]  (Sampling)
## Chain 1: Iteration:  700 / 2000 [ 35%]  (Sampling)
## Chain 1: Iteration:  900 / 2000 [ 45%]  (Sampling)
## Chain 1: Iteration: 1100 / 2000 [ 55%]  (Sampling)
## Chain 1: Iteration: 1300 / 2000 [ 65%]  (Sampling)
## Chain 1: Iteration: 1500 / 2000 [ 75%]  (Sampling)
## Chain 1: Iteration: 1700 / 2000 [ 85%]  (Sampling)
## Chain 1: Iteration: 1900 / 2000 [ 95%]  (Sampling)
## Chain 1: Iteration: 2000 / 2000 [100%]  (Sampling)
## Chain 1: 
## Chain 1:  Elapsed Time: 0.049 seconds (Warm-up)
## Chain 1:                0.124 seconds (Sampling)
## Chain 1:                0.173 seconds (Total)
## Chain 1: 
## 
## SAMPLING FOR MODEL 'anon_model' NOW (CHAIN 2).
## Chain 2: 
## Chain 2: Gradient evaluation took 1.6e-05 seconds
## Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 0.16 seconds.
## Chain 2: Adjust your expectations accordingly!
## Chain 2: 
## Chain 2: 
## Chain 2: Iteration:    1 / 2000 [  0%]  (Warmup)
## Chain 2: Iteration:  200 / 2000 [ 10%]  (Warmup)
## Chain 2: Iteration:  400 / 2000 [ 20%]  (Warmup)
## Chain 2: Iteration:  501 / 2000 [ 25%]  (Sampling)
## Chain 2: Iteration:  700 / 2000 [ 35%]  (Sampling)
## Chain 2: Iteration:  900 / 2000 [ 45%]  (Sampling)
## Chain 2: Iteration: 1100 / 2000 [ 55%]  (Sampling)
## Chain 2: Iteration: 1300 / 2000 [ 65%]  (Sampling)
## Chain 2: Iteration: 1500 / 2000 [ 75%]  (Sampling)
## Chain 2: Iteration: 1700 / 2000 [ 85%]  (Sampling)
## Chain 2: Iteration: 1900 / 2000 [ 95%]  (Sampling)
## Chain 2: Iteration: 2000 / 2000 [100%]  (Sampling)
## Chain 2: 
## Chain 2:  Elapsed Time: 0.053 seconds (Warm-up)
## Chain 2:                0.103 seconds (Sampling)
## Chain 2:                0.156 seconds (Total)
## Chain 2: 
## 
## SAMPLING FOR MODEL 'anon_model' NOW (CHAIN 3).
## Chain 3: 
## Chain 3: Gradient evaluation took 1.6e-05 seconds
## Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 0.16 seconds.
## Chain 3: Adjust your expectations accordingly!
## Chain 3: 
## Chain 3: 
## Chain 3: Iteration:    1 / 2000 [  0%]  (Warmup)
## Chain 3: Iteration:  200 / 2000 [ 10%]  (Warmup)
## Chain 3: Iteration:  400 / 2000 [ 20%]  (Warmup)
## Chain 3: Iteration:  501 / 2000 [ 25%]  (Sampling)
## Chain 3: Iteration:  700 / 2000 [ 35%]  (Sampling)
## Chain 3: Iteration:  900 / 2000 [ 45%]  (Sampling)
## Chain 3: Iteration: 1100 / 2000 [ 55%]  (Sampling)
## Chain 3: Iteration: 1300 / 2000 [ 65%]  (Sampling)
## Chain 3: Iteration: 1500 / 2000 [ 75%]  (Sampling)
## Chain 3: Iteration: 1700 / 2000 [ 85%]  (Sampling)
## Chain 3: Iteration: 1900 / 2000 [ 95%]  (Sampling)
## Chain 3: Iteration: 2000 / 2000 [100%]  (Sampling)
## Chain 3: 
## Chain 3:  Elapsed Time: 0.04 seconds (Warm-up)
## Chain 3:                0.111 seconds (Sampling)
## Chain 3:                0.151 seconds (Total)
## Chain 3:
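
Before looking at the estimates, it is worth confirming which priors the model actually used: the three we specified plus brms defaults for any remaining parameters such as sigma. A minimal check on the fitted object:

prior_summary(regresi_bayes)  # Lists the prior attached to each parameter class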

3. Displaying the Model Results

summary(regresi_bayes)
##  Family: gaussian 
##   Links: mu = identity; sigma = identity 
## Formula: y ~ x1 + x2 
##    Data: data (Number of observations: 250) 
##   Draws: 3 chains, each with iter = 2000; warmup = 500; thin = 1;
##          total post-warmup draws = 4500
## 
## Regression Coefficients:
##           Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
## Intercept     0.99      0.27     0.47     1.52 1.00     4932     3400
## x1            3.45      0.07     3.31     3.58 1.00     4780     3562
## x2           -1.46      0.04    -1.54    -1.39 1.00     4681     3240
## 
## Further Distributional Parameters:
##       Estimate Est.Error l-95% CI u-95% CI Rhat Bulk_ESS Tail_ESS
## sigma     1.07      0.05     0.99     1.17 1.00     5222     3241
## 
## Draws were sampled using sampling(NUTS). For each parameter, Bulk_ESS
## and Tail_ESS are effective sample size measures, and Rhat is the potential
## scale reduction factor on split chains (at convergence, Rhat = 1).
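
If the coefficient summaries are needed for further computation rather than just display, they can be extracted as a numeric matrix with the standard brms accessor:

fixef(regresi_bayes)  # Posterior mean, error, and 95% credible interval for b0, b1, b2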

4. Visualizing the Posterior Distributions

plot(regresi_bayes)  # Posterior density and trace plots for each parameter
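
Beyond the parameter plots, a posterior predictive check compares the observed distribution of y with data simulated from the fitted model; a clear mismatch would indicate misspecification. A minimal sketch using the built-in brms helper (the number of draws shown is an arbitrary choice):

pp_check(regresi_bayes, ndraws = 50)  # Overlay the observed density of y with 50 posterior predictive draws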

5. Predicting Y from the Posterior Distribution

y_duga <- posterior_predict(regresi_bayes)  # Matrix of draws: rows = posterior draws, columns = observations
hist(y_duga[, 1], main = "Posterior Predictive Distribution for Observation 1", xlab = "y_duga")

plot(y, xlab = "Observation [1:250]", ylab = "Observed y (blue) and predicted y_duga (red)", type = "l", col = "blue")
lines(y_duga[4, ], type = "l", col = "red")  # A single posterior predictive draw (draw 4) across all observations
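
The red line above is only a single posterior draw, so it is noisy by construction. Averaging over all draws gives the posterior predictive mean for each observation, which can be compared directly with the observed values; a short base-R sketch:

y_mean <- colMeans(y_duga)  # Posterior predictive mean per observation
plot(y, y_mean, xlab = "Observed y", ylab = "Posterior predictive mean")
abline(0, 1, col = "red")   # 45-degree reference line: perfect agreement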
