# ============================================================
# 4. LEVEL-2 Z MATRIX (CITY-LEVEL MEANS)
# ============================================================
Z_tmp <- bayes_dat %>%
  group_by(kota_f) %>%
  summarise(
    Z1_s = mean(Z1_s),
    Z2_s = mean(Z2_s),
    .groups = "drop"
  )
Z_tmp <- Z_tmp[order(as.integer(Z_tmp$kota_f)), ]
Z_mat <- as.matrix(Z_tmp[, c("Z1_s", "Z2_s")])
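# Quick sanity check (optional sketch): Z_mat should end up with one row per
# city, ordered by the integer codes of kota_f, and with M = 2 columns.
stopifnot(
  nrow(Z_mat) == length(unique(bayes_dat$kota_f)),
  ncol(Z_mat) == 2
)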
# ============================================================
# 5. MIXTURE COMPONENTS AS SPECIFIED IN THE ASSIGNMENT
#    X1 ≤ 0.1 → component 1, X1 > 0.1 → component 2
# ============================================================
comp <- ifelse(bayes_dat$X1 <= 0.1, 1L, 2L)
table(comp)
## comp
## 1 2
## 233 148
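# Optional check (a sketch, not evaluated here): cross-tabulating the fixed
# component assignment against the city factor shows whether any city
# contributes only a handful of observations to one component, which would
# leave that city's component-specific parameters weakly identified.
# table(kota = bayes_dat$kota_f, comp = comp)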
# ============================================================
# 6. DATA FOR STAN (MATCHING mix_hier_skewnormal.stan)
# ============================================================
N <- nrow(bayes_dat)
J <- length(unique(bayes_dat$kota_f))
K <- 2L  # number of mixture components
R <- 5L  # X1..X5
M <- 2L  # Z1, Z2
X_mat <- as.matrix(
  bayes_dat[, c("X1_s", "X2_s", "X3_s", "X4_s", "X5_s")]
)
stan_data <- list(
  N    = N,
  J    = J,
  K    = K,
  R    = R,
  M    = M,
  Y    = bayes_dat$Y,
  X    = X_mat,
  kota = as.integer(bayes_dat$kota_f),
  Z    = Z_mat,
  comp = comp  # used in Stan to fix each observation's mixture component
)
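# Optional consistency checks (a sketch) so that dimension mismatches surface
# here rather than as a cryptic data-block error inside Stan:
stopifnot(
  length(stan_data$Y) == N,
  nrow(stan_data$X) == N, ncol(stan_data$X) == R,
  nrow(stan_data$Z) == J, ncol(stan_data$Z) == M,
  length(stan_data$kota) == N,
  all(stan_data$kota %in% 1:J),
  all(stan_data$comp %in% 1:K)
)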
# ============================================================
# 7. RUN THE STAN MODEL (HIERARCHICAL SKEW-NORMAL MIXTURE)
# ============================================================
fit_hier_mix <- stan(
  file    = "mix_hier_skewnormal.stan",  # make sure the Stan file uses 'comp'
  data    = stan_data,
  chains  = 4,
  warmup  = 1000,
  iter    = 2000,
  seed    = 123,
  control = list(
    adapt_delta   = 0.90,
    max_treedepth = 20
  )
)
## Warning: There were 3800 divergent transitions after warmup. See
## https://mc-stan.org/misc/warnings.html#divergent-transitions-after-warmup
## to find out why this is a problem and how to eliminate them.
## Warning: There were 2 chains where the estimated Bayesian Fraction of Missing Information was low. See
## https://mc-stan.org/misc/warnings.html#bfmi-low
## Warning: Examine the pairs() plot to diagnose sampling problems
## Warning: The largest R-hat is 1.13, indicating chains have not mixed.
## Running the chains for more iterations may help. See
## https://mc-stan.org/misc/warnings.html#r-hat
## Warning: Bulk Effective Samples Size (ESS) is too low, indicating posterior means and medians may be unreliable.
## Running the chains for more iterations may help. See
## https://mc-stan.org/misc/warnings.html#bulk-ess
## Warning: Tail Effective Samples Size (ESS) is too low, indicating posterior variances and tail quantiles may be unreliable.
## Running the chains for more iterations may help. See
## https://mc-stan.org/misc/warnings.html#tail-ess
saveRDS(fit_hier_mix, "fit_hier_mix_final.rds")
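# With 3800 divergent transitions and R-hat up to about 1.13, these draws are
# not reliable for inference. A minimal follow-up (a sketch, not run here;
# fit_hier_mix2 is just a placeholder name) is to sample more cautiously and
# for longer; if divergences persist, the Stan file itself likely needs a
# non-centered parameterization for the city-level random effects.
# fit_hier_mix2 <- stan(
#   file    = "mix_hier_skewnormal.stan",
#   data    = stan_data,
#   chains  = 4,
#   warmup  = 2000,
#   iter    = 4000,
#   seed    = 123,
#   control = list(adapt_delta = 0.99, max_treedepth = 15)
# )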
# ============================================================
# 8. PARAMETER SUMMARY (LEVEL 1 & LEVEL 2)
# ============================================================
print(
  fit_hier_mix,
  pars = c(
    "alpha",        # skewness
    "sigma",        # residual SD
    "beta0",        # intercepts per city & component
    "beta",         # level-1 slopes
    "gamma0_0",     # global level-2 intercepts
    "gamma0",       # effect of Z on the intercepts
    "gamma",        # effect of Z on the slopes
    "tau0", "tau"   # random-effect SDs
  ),
  probs = c(0.025, 0.5, 0.975)
)
## Inference for Stan model: anon_model.
## 4 chains, each with iter=2000; warmup=1000; thin=1;
## post-warmup draws per chain=1000, total post-warmup draws=4000.
##
## mean se_mean sd 2.5% 50% 97.5% n_eff Rhat
## alpha[1] 1.85 0.17 1.92 -0.37 1.40 6.52 123 1.03
## alpha[2] 4.53 0.03 0.78 3.10 4.46 6.36 517 1.01
## sigma[1] 1.11 0.03 1.00 0.04 0.91 3.83 882 1.00
## sigma[2] 11.55 0.03 0.52 10.55 11.51 12.60 347 1.02
## beta0[1,1] 0.30 0.37 5.20 -9.61 -0.37 10.99 199 1.01
## beta0[1,2] 46.77 0.06 0.97 44.75 46.88 48.66 302 1.02
## beta0[2,1] 0.28 0.41 5.24 -10.09 -0.25 10.19 163 1.02
## beta0[2,2] 41.53 0.05 0.98 39.62 41.43 43.68 367 1.01
## beta0[3,1] -0.01 0.49 5.45 -10.92 -0.40 10.62 124 1.02
## beta0[3,2] 40.99 0.04 0.73 39.47 41.07 42.41 405 1.01
## beta0[4,1] 0.34 0.37 5.12 -9.80 -0.36 10.81 193 1.01
## beta0[4,2] 44.92 0.09 0.99 42.70 45.00 46.66 135 1.04
## beta0[5,1] 0.02 0.40 5.02 -9.87 -0.34 9.73 156 1.02
## beta0[5,2] 42.63 0.04 0.86 40.92 42.54 44.49 378 1.03
## beta0[6,1] -0.56 0.63 7.11 -14.17 -0.97 13.54 127 1.02
## beta0[6,2] 41.74 0.09 1.35 39.00 41.92 44.26 207 1.01
## beta0[7,1] 0.04 0.44 5.16 -10.14 -0.30 10.06 139 1.02
## beta0[7,2] 41.29 0.06 0.88 39.17 41.40 42.86 225 1.02
## beta0[8,1] 0.16 0.45 4.98 -9.25 -0.24 10.00 123 1.02
## beta0[8,2] 44.10 0.05 0.82 42.51 44.11 45.80 253 1.03
## beta0[9,1] 0.00 0.48 5.78 -10.62 -0.28 11.74 146 1.01
## beta0[9,2] 52.32 0.32 2.44 48.19 52.02 58.06 59 1.12
## beta[1,1,1] -0.15 0.20 2.84 -5.52 -0.41 5.59 206 1.02
## beta[1,1,2] 0.82 0.26 3.12 -5.31 1.01 6.43 141 1.03
## beta[1,1,3] -0.38 0.21 3.52 -7.67 -0.50 6.41 279 1.02
## beta[1,1,4] 0.20 0.22 2.72 -5.42 0.29 4.85 150 1.03
## beta[1,1,5] 0.02 0.25 2.75 -5.16 0.14 5.64 116 1.03
## beta[1,2,1] 1.47 0.07 0.91 -0.21 1.47 3.36 171 1.04
## beta[1,2,2] 0.16 0.09 1.16 -2.35 0.27 2.20 186 1.04
## beta[1,2,3] 1.03 0.02 0.49 0.03 1.00 1.98 643 1.01
## beta[1,2,4] -0.51 0.03 0.67 -2.11 -0.46 0.62 420 1.01
## beta[1,2,5] -0.31 0.06 1.12 -2.52 -0.35 2.06 374 1.01
## beta[2,1,1] 0.35 0.17 2.25 -4.12 0.51 4.82 178 1.03
## beta[2,1,2] -0.18 0.21 2.64 -5.07 -0.33 5.26 153 1.03
## beta[2,1,3] 0.03 0.25 2.95 -5.90 0.05 5.56 136 1.02
## beta[2,1,4] 0.02 0.11 1.92 -3.82 -0.02 3.67 311 1.01
## beta[2,1,5] 0.00 0.14 2.39 -4.60 -0.02 4.70 301 1.01
## beta[2,2,1] -1.01 0.12 1.28 -3.12 -1.08 1.72 104 1.03
## beta[2,2,2] 0.81 0.13 1.78 -2.40 0.61 4.55 189 1.03
## beta[2,2,3] 0.75 0.05 1.23 -1.74 0.87 3.23 594 1.01
## beta[2,2,4] -0.28 0.11 0.81 -1.64 -0.28 1.34 58 1.05
## beta[2,2,5] 0.95 0.19 1.72 -2.43 0.96 4.31 80 1.06
## beta[3,1,1] 0.33 0.19 2.50 -4.85 0.56 4.75 175 1.03
## beta[3,1,2] -1.01 0.60 3.32 -7.19 -0.73 5.15 31 1.09
## beta[3,1,3] 0.22 0.16 2.85 -5.19 0.27 5.78 323 1.02
## beta[3,1,4] -0.14 0.15 2.22 -3.82 -0.29 4.44 216 1.02
## beta[3,1,5] 0.13 0.16 2.35 -4.61 -0.10 5.17 205 1.02
## beta[3,2,1] -1.58 0.06 0.81 -3.19 -1.64 0.01 173 1.05
## beta[3,2,2] 0.70 0.11 1.08 -1.23 0.68 2.96 102 1.06
## beta[3,2,3] 1.03 0.04 0.93 -0.67 1.00 2.94 427 1.01
## beta[3,2,4] -0.03 0.04 0.59 -1.13 -0.07 1.15 253 1.02
## beta[3,2,5] 1.05 0.10 1.23 -1.49 1.10 3.27 164 1.04
## beta[4,1,1] -0.22 0.42 2.93 -5.05 -0.19 5.39 48 1.06
## beta[4,1,2] 0.56 0.16 2.87 -5.23 0.70 6.21 311 1.01
## beta[4,1,3] -0.03 0.19 3.19 -6.42 0.25 6.16 278 1.03
## beta[4,1,4] 0.17 0.18 2.36 -4.55 0.26 4.23 165 1.03
## beta[4,1,5] 0.11 0.26 2.62 -5.14 0.15 5.23 98 1.03
## beta[4,2,1] 0.05 0.15 0.88 -1.61 0.13 1.59 35 1.07
## beta[4,2,2] 2.28 0.07 1.01 0.41 2.14 4.36 200 1.02
## beta[4,2,3] 2.33 0.14 1.17 0.12 2.33 4.40 69 1.04
## beta[4,2,4] -0.30 0.06 0.67 -1.45 -0.37 1.26 128 1.02
## beta[4,2,5] -0.48 0.13 1.36 -3.39 -0.52 2.18 118 1.03
## beta[5,1,1] 0.04 0.12 1.60 -2.80 -0.04 3.30 171 1.04
## beta[5,1,2] -0.06 0.12 2.19 -4.12 -0.24 4.67 316 1.02
## beta[5,1,3] 0.24 0.25 2.39 -4.43 0.16 4.87 92 1.03
## beta[5,1,4] 0.02 0.06 1.17 -2.28 -0.10 2.36 332 1.01
## beta[5,1,5] -0.14 0.28 1.82 -3.20 0.00 3.33 41 1.07
## beta[5,2,1] -1.05 0.10 1.15 -3.75 -1.00 1.27 131 1.04
## beta[5,2,2] 1.49 0.19 1.22 -0.52 1.34 3.79 43 1.07
## beta[5,2,3] 0.37 0.04 1.16 -1.97 0.49 2.63 694 1.01
## beta[5,2,4] -0.12 0.04 0.71 -1.32 -0.16 1.71 341 1.01
## beta[5,2,5] 0.91 0.08 1.24 -1.49 0.87 3.68 267 1.02
## beta[6,1,1] -0.13 0.33 4.86 -9.97 0.35 9.25 221 1.01
## beta[6,1,2] -1.26 0.34 5.05 -10.85 -1.53 8.65 214 1.02
## beta[6,1,3] 0.33 0.29 5.15 -9.42 -0.01 10.75 309 1.03
## beta[6,1,4] -0.35 0.40 4.78 -8.39 -0.49 9.13 143 1.04
## beta[6,1,5] 0.18 0.36 4.66 -10.21 -0.05 9.18 163 1.03
## beta[6,2,1] -0.99 0.09 1.47 -3.89 -0.84 1.98 241 1.02
## beta[6,2,2] -1.46 0.15 1.42 -4.25 -1.45 1.22 88 1.04
## beta[6,2,3] 2.79 0.16 1.46 -0.04 2.83 5.29 82 1.03
## beta[6,2,4] 0.86 0.05 0.92 -1.05 0.92 2.63 296 1.02
## beta[6,2,5] 0.38 0.17 2.19 -4.34 0.58 4.29 173 1.03
## beta[7,1,1] 0.11 0.12 1.80 -3.42 0.02 3.61 217 1.03
## beta[7,1,2] -0.24 0.13 2.22 -4.62 -0.47 4.35 292 1.02
## beta[7,1,3] -0.65 0.53 2.96 -6.03 -0.53 4.87 31 1.09
## beta[7,1,4] -0.04 0.09 1.48 -2.64 -0.15 2.93 264 1.01
## beta[7,1,5] -0.18 0.30 2.13 -3.79 -0.09 4.00 49 1.06
## beta[7,2,1] -1.69 0.12 1.29 -5.01 -1.48 0.27 116 1.05
## beta[7,2,2] 0.79 0.12 1.84 -2.25 0.46 5.31 234 1.03
## beta[7,2,3] 1.99 0.10 1.53 -0.81 2.00 5.06 216 1.02
## beta[7,2,4] 0.02 0.04 0.62 -1.03 -0.03 1.35 256 1.01
## beta[7,2,5] 0.93 0.13 1.60 -2.71 1.06 3.90 158 1.04
## beta[8,1,1] -0.07 0.11 1.20 -2.51 -0.12 2.54 128 1.03
## beta[8,1,2] -0.22 0.25 2.02 -3.75 -0.24 4.15 67 1.04
## beta[8,1,3] -0.23 0.11 2.19 -4.46 -0.31 4.29 369 1.01
## beta[8,1,4] 0.04 0.04 0.74 -1.37 0.09 1.66 355 1.02
## beta[8,1,5] 0.02 0.09 1.45 -3.56 0.18 2.66 255 1.04
## beta[8,2,1] -0.29 0.06 0.91 -2.39 -0.23 1.43 236 1.03
## beta[8,2,2] 1.60 0.06 1.31 -0.66 1.52 4.46 431 1.01
## beta[8,2,3] 0.41 0.03 0.99 -1.54 0.35 2.36 903 1.00
## beta[8,2,4] -0.26 0.02 0.34 -1.04 -0.24 0.32 239 1.03
## beta[8,2,5] 0.37 0.15 1.16 -1.54 0.30 3.09 64 1.06
## beta[9,1,1] -0.57 0.28 3.63 -7.61 -0.84 7.00 164 1.03
## beta[9,1,2] 0.81 0.34 3.86 -7.08 1.06 7.89 125 1.03
## beta[9,1,3] -0.08 0.21 3.80 -7.62 -0.43 7.45 339 1.01
## beta[9,1,4] 0.12 0.20 3.28 -6.39 0.32 5.82 272 1.01
## beta[9,1,5] 0.09 0.40 3.73 -7.33 0.10 6.37 87 1.04
## beta[9,2,1] 2.02 0.10 1.22 -0.44 1.91 4.44 138 1.05
## beta[9,2,2] 1.54 0.07 1.01 -0.51 1.58 3.56 186 1.04
## beta[9,2,3] 2.25 0.09 1.01 0.11 2.36 4.10 138 1.03
## beta[9,2,4] 0.09 0.13 1.30 -2.68 0.14 2.22 107 1.03
## beta[9,2,5] -2.79 0.31 2.32 -8.18 -2.43 1.00 58 1.11
## gamma0_0[1] 0.03 0.43 4.68 -8.77 -0.36 9.26 117 1.02
## gamma0_0[2] 43.58 0.59 3.57 31.32 44.31 45.59 37 1.15
## gamma0[1,1] -0.27 0.09 1.97 -4.15 -0.35 3.50 470 1.01
## gamma0[1,2] -0.04 0.11 1.96 -3.85 -0.19 3.91 332 1.01
## gamma0[2,1] 0.40 0.05 0.80 -1.42 0.47 1.84 232 1.03
## gamma0[2,2] 3.56 0.14 1.15 0.56 3.78 5.39 69 1.08
## gamma[1,1,1] -0.10 0.14 1.96 -4.38 0.02 3.56 194 1.03
## gamma[1,1,2] -0.32 0.15 1.97 -4.19 -0.50 3.70 174 1.03
## gamma[1,2,1] 0.05 0.05 0.72 -1.31 -0.02 1.58 250 1.01
## gamma[1,2,2] 1.39 0.06 0.79 -0.20 1.47 2.87 166 1.04
## gamma[2,1,1] -0.38 0.12 1.93 -4.05 -0.47 3.62 276 1.02
## gamma[2,1,2] 0.44 0.21 1.97 -3.62 0.51 3.97 88 1.04
## gamma[2,2,1] -0.91 0.05 0.81 -2.47 -0.95 0.74 309 1.01
## gamma[2,2,2] 0.03 0.09 0.92 -1.86 0.09 1.68 101 1.05
## gamma[3,1,1] 0.06 0.12 1.94 -3.49 -0.15 3.98 245 1.04
## gamma[3,1,2] -0.12 0.17 1.96 -4.04 -0.17 3.56 129 1.02
## gamma[3,2,1] 0.07 0.20 1.04 -2.00 0.14 2.06 27 1.10
## gamma[3,2,2] 0.15 0.05 0.86 -1.61 0.20 1.82 355 1.01
## gamma[4,1,1] -0.12 0.14 1.89 -3.51 -0.29 3.68 181 1.04
## gamma[4,1,2] 0.08 0.11 1.89 -3.63 0.19 3.40 270 1.01
## gamma[4,2,1] 0.39 0.06 0.54 -0.75 0.39 1.34 70 1.04
## gamma[4,2,2] 0.06 0.07 0.72 -1.43 0.07 1.27 98 1.03
## gamma[5,1,1] -0.10 0.14 1.89 -4.06 -0.16 3.54 191 1.02
## gamma[5,1,2] -0.09 0.12 1.95 -4.07 0.00 3.53 248 1.02
## gamma[5,2,1] -0.35 0.13 1.13 -2.81 -0.25 1.56 72 1.06
## gamma[5,2,2] -1.31 0.09 1.10 -3.63 -1.19 0.84 153 1.06
## tau0[1] 1.00 0.11 0.98 0.08 0.69 3.64 75 1.05
## tau0[2] 1.44 0.45 2.47 0.10 0.73 9.45 30 1.18
## tau[1] 0.95 0.08 0.64 0.09 0.86 2.46 62 1.08
## tau[2] 1.69 0.07 0.72 0.40 1.65 3.31 120 1.04
## tau[3] 2.16 0.11 0.77 0.88 2.06 3.67 45 1.07
## tau[4] 0.36 0.05 0.34 0.02 0.24 1.29 48 1.05
## tau[5] 0.97 0.10 0.78 0.09 0.80 3.02 59 1.10
##
## Samples were drawn using NUTS(diag_e) at Wed Dec 10 11:58:00 2025.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Check overall convergence
rhat_range <- range(summary(fit_hier_mix)$summary[, "Rhat"], na.rm = TRUE)
rhat_range  # ideally the maximum should be below 1.05
## [1] 0.9991604 1.1799666
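# A useful follow-up (a sketch, not evaluated here; fit_summ is introduced
# only for this check): list the worst-mixing parameters by sorting on Rhat.
# fit_summ <- rstan::summary(fit_hier_mix)$summary
# head(fit_summ[order(fit_summ[, "Rhat"], decreasing = TRUE),
#               c("mean", "n_eff", "Rhat")], 10)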
# ============================================================
# 9. LEVEL-1 & LEVEL-2 PARAMETER TABLES (FOR THE REPORT)
# ============================================================
summ_all <- rstan::summary(fit_hier_mix)$summary
# ---- LEVEL 1: alpha, sigma, beta0, beta ----
tab_alpha <- summ_all[grep("^alpha\\[", rownames(summ_all)),
                      c("mean", "sd", "2.5%", "50%", "97.5%")]
tab_sigma <- summ_all[grep("^sigma\\[", rownames(summ_all)),
                      c("mean", "sd", "2.5%", "50%", "97.5%")]
tab_beta0 <- summ_all[grep("^beta0\\[", rownames(summ_all)),
                      c("mean", "sd", "2.5%", "50%", "97.5%")]
tab_beta  <- summ_all[grep("^beta\\[",  rownames(summ_all)),
                      c("mean", "sd", "2.5%", "50%", "97.5%")]
table_level1 <- rbind(
  data.frame(node = rownames(tab_alpha), tab_alpha),
  data.frame(node = rownames(tab_sigma), tab_sigma),
  data.frame(node = rownames(tab_beta0), tab_beta0),
  data.frame(node = rownames(tab_beta),  tab_beta)
)
kable(
  table_level1, digits = 3,
  caption = "Table 4. Parameter Estimates of the Hierarchical Skew-Normal Mixture Model (Level 1)"
)
Table 4. Parameter Estimates of the Hierarchical Skew-Normal Mixture Model (Level 1)

| node        |   mean |    sd |    2.5% |    50% |  97.5% |
|:------------|-------:|------:|--------:|-------:|-------:|
| alpha[1]    |  1.853 | 1.923 |  -0.370 |  1.401 |  6.516 |
| alpha[2]    |  4.533 | 0.782 |   3.101 |  4.459 |  6.358 |
| sigma[1]    |  1.109 | 1.003 |   0.039 |  0.909 |  3.827 |
| sigma[2]    | 11.554 | 0.517 |  10.548 | 11.508 | 12.604 |
| beta0[1,1]  |  0.301 | 5.204 |  -9.608 | -0.369 | 10.988 |
| beta0[1,2]  | 46.771 | 0.974 |  44.750 | 46.879 | 48.664 |
| beta0[2,1]  |  0.280 | 5.238 | -10.087 | -0.251 | 10.186 |
| beta0[2,2]  | 41.530 | 0.979 |  39.617 | 41.433 | 43.684 |
| beta0[3,1]  | -0.007 | 5.454 | -10.919 | -0.402 | 10.624 |
| beta0[3,2]  | 40.990 | 0.729 |  39.475 | 41.070 | 42.409 |
| beta0[4,1]  |  0.339 | 5.122 |  -9.805 | -0.362 | 10.814 |
| beta0[4,2]  | 44.916 | 0.991 |  42.704 | 44.999 | 46.661 |
| beta0[5,1]  |  0.019 | 5.019 |  -9.871 | -0.341 |  9.730 |
| beta0[5,2]  | 42.633 | 0.863 |  40.916 | 42.541 | 44.487 |
| beta0[6,1]  | -0.560 | 7.107 | -14.167 | -0.968 | 13.539 |
| beta0[6,2]  | 41.738 | 1.354 |  39.000 | 41.919 | 44.263 |
| beta0[7,1]  |  0.045 | 5.163 | -10.141 | -0.296 | 10.059 |
| beta0[7,2]  | 41.289 | 0.884 |  39.173 | 41.397 | 42.855 |
| beta0[8,1]  |  0.157 | 4.982 |  -9.251 | -0.236 |  9.999 |
| beta0[8,2]  | 44.102 | 0.818 |  42.511 | 44.110 | 45.803 |
| beta0[9,1]  | -0.004 | 5.779 | -10.620 | -0.282 | 11.745 |
| beta0[9,2]  | 52.323 | 2.443 |  48.195 | 52.021 | 58.059 |
| beta[1,1,1] | -0.146 | 2.843 |  -5.520 | -0.412 |  5.585 |
| beta[1,1,2] |  0.824 | 3.124 |  -5.312 |  1.011 |  6.426 |
| beta[1,1,3] | -0.383 | 3.520 |  -7.667 | -0.503 |  6.408 |
| beta[1,1,4] |  0.195 | 2.717 |  -5.424 |  0.294 |  4.852 |
| beta[1,1,5] |  0.024 | 2.749 |  -5.155 |  0.142 |  5.641 |
| beta[1,2,1] |  1.473 | 0.911 |  -0.209 |  1.467 |  3.359 |
| beta[1,2,2] |  0.160 | 1.161 |  -2.352 |  0.266 |  2.197 |
| beta[1,2,3] |  1.026 | 0.488 |   0.034 |  0.999 |  1.979 |
| beta[1,2,4] | -0.512 | 0.670 |  -2.112 | -0.456 |  0.618 |
| beta[1,2,5] | -0.315 | 1.123 |  -2.523 | -0.349 |  2.060 |
| beta[2,1,1] |  0.347 | 2.254 |  -4.124 |  0.513 |  4.821 |
| beta[2,1,2] | -0.180 | 2.641 |  -5.066 | -0.333 |  5.262 |
| beta[2,1,3] |  0.034 | 2.949 |  -5.898 |  0.052 |  5.557 |
| beta[2,1,4] |  0.016 | 1.916 |  -3.821 | -0.017 |  3.674 |
| beta[2,1,5] |  0.002 | 2.386 |  -4.600 | -0.021 |  4.698 |
| beta[2,2,1] | -1.007 | 1.275 |  -3.124 | -1.084 |  1.718 |
| beta[2,2,2] |  0.806 | 1.775 |  -2.399 |  0.610 |  4.552 |
| beta[2,2,3] |  0.751 | 1.235 |  -1.745 |  0.866 |  3.229 |
| beta[2,2,4] | -0.281 | 0.809 |  -1.637 | -0.281 |  1.338 |
| beta[2,2,5] |  0.948 | 1.716 |  -2.432 |  0.961 |  4.306 |
| beta[3,1,1] |  0.332 | 2.501 |  -4.845 |  0.561 |  4.753 |
| beta[3,1,2] | -1.014 | 3.325 |  -7.194 | -0.731 |  5.148 |
| beta[3,1,3] |  0.216 | 2.849 |  -5.192 |  0.269 |  5.780 |
| beta[3,1,4] | -0.144 | 2.224 |  -3.819 | -0.290 |  4.440 |
| beta[3,1,5] |  0.133 | 2.351 |  -4.606 | -0.102 |  5.170 |
| beta[3,2,1] | -1.579 | 0.814 |  -3.186 | -1.643 |  0.015 |
| beta[3,2,2] |  0.699 | 1.076 |  -1.226 |  0.676 |  2.964 |
| beta[3,2,3] |  1.031 | 0.929 |  -0.671 |  0.999 |  2.938 |
| beta[3,2,4] | -0.030 | 0.585 |  -1.132 | -0.073 |  1.150 |
| beta[3,2,5] |  1.046 | 1.230 |  -1.494 |  1.101 |  3.275 |
| beta[4,1,1] | -0.217 | 2.927 |  -5.046 | -0.187 |  5.391 |
| beta[4,1,2] |  0.563 | 2.874 |  -5.228 |  0.702 |  6.208 |
| beta[4,1,3] | -0.025 | 3.188 |  -6.420 |  0.249 |  6.157 |
| beta[4,1,4] |  0.174 | 2.362 |  -4.553 |  0.263 |  4.226 |
| beta[4,1,5] |  0.109 | 2.623 |  -5.135 |  0.147 |  5.227 |
| beta[4,2,1] |  0.050 | 0.884 |  -1.606 |  0.133 |  1.586 |
| beta[4,2,2] |  2.280 | 1.014 |   0.413 |  2.143 |  4.359 |
| beta[4,2,3] |  2.329 | 1.170 |   0.116 |  2.330 |  4.405 |
| beta[4,2,4] | -0.298 | 0.670 |  -1.446 | -0.366 |  1.257 |
| beta[4,2,5] | -0.476 | 1.365 |  -3.386 | -0.515 |  2.182 |
| beta[5,1,1] |  0.042 | 1.604 |  -2.798 | -0.041 |  3.298 |
| beta[5,1,2] | -0.063 | 2.191 |  -4.119 | -0.241 |  4.669 |
| beta[5,1,3] |  0.243 | 2.388 |  -4.426 |  0.155 |  4.875 |
| beta[5,1,4] |  0.021 | 1.167 |  -2.284 | -0.095 |  2.363 |
| beta[5,1,5] | -0.142 | 1.823 |  -3.204 |  0.002 |  3.329 |
| beta[5,2,1] | -1.051 | 1.152 |  -3.754 | -1.004 |  1.272 |
| beta[5,2,2] |  1.495 | 1.222 |  -0.518 |  1.335 |  3.789 |
| beta[5,2,3] |  0.372 | 1.162 |  -1.973 |  0.493 |  2.634 |
| beta[5,2,4] | -0.123 | 0.711 |  -1.317 | -0.161 |  1.714 |
| beta[5,2,5] |  0.909 | 1.236 |  -1.495 |  0.870 |  3.680 |
| beta[6,1,1] | -0.128 | 4.857 |  -9.972 |  0.346 |  9.247 |
| beta[6,1,2] | -1.261 | 5.046 | -10.846 | -1.534 |  8.652 |
| beta[6,1,3] |  0.334 | 5.146 |  -9.417 | -0.007 | 10.749 |
| beta[6,1,4] | -0.349 | 4.781 |  -8.386 | -0.489 |  9.130 |
| beta[6,1,5] |  0.183 | 4.663 | -10.206 | -0.046 |  9.184 |
| beta[6,2,1] | -0.989 | 1.468 |  -3.889 | -0.841 |  1.983 |
| beta[6,2,2] | -1.464 | 1.415 |  -4.251 | -1.450 |  1.219 |
| beta[6,2,3] |  2.787 | 1.462 |  -0.042 |  2.827 |  5.291 |
| beta[6,2,4] |  0.855 | 0.921 |  -1.054 |  0.924 |  2.631 |
| beta[6,2,5] |  0.379 | 2.192 |  -4.338 |  0.579 |  4.288 |
| beta[7,1,1] |  0.110 | 1.800 |  -3.416 |  0.019 |  3.611 |
| beta[7,1,2] | -0.239 | 2.215 |  -4.622 | -0.471 |  4.347 |
| beta[7,1,3] | -0.653 | 2.960 |  -6.027 | -0.526 |  4.875 |
| beta[7,1,4] | -0.035 | 1.478 |  -2.637 | -0.154 |  2.930 |
| beta[7,1,5] | -0.178 | 2.129 |  -3.787 | -0.086 |  3.999 |
| beta[7,2,1] | -1.691 | 1.291 |  -5.011 | -1.481 |  0.270 |
| beta[7,2,2] |  0.793 | 1.844 |  -2.248 |  0.460 |  5.311 |
| beta[7,2,3] |  1.990 | 1.532 |  -0.808 |  1.995 |  5.062 |
| beta[7,2,4] |  0.021 | 0.618 |  -1.030 | -0.030 |  1.350 |
| beta[7,2,5] |  0.931 | 1.597 |  -2.706 |  1.061 |  3.896 |
| beta[8,1,1] | -0.070 | 1.205 |  -2.507 | -0.117 |  2.542 |
| beta[8,1,2] | -0.223 | 2.018 |  -3.749 | -0.244 |  4.154 |
| beta[8,1,3] | -0.228 | 2.186 |  -4.459 | -0.313 |  4.291 |
| beta[8,1,4] |  0.040 | 0.737 |  -1.371 |  0.090 |  1.657 |
| beta[8,1,5] |  0.018 | 1.453 |  -3.556 |  0.176 |  2.659 |
| beta[8,2,1] | -0.285 | 0.907 |  -2.389 | -0.234 |  1.431 |
| beta[8,2,2] |  1.603 | 1.314 |  -0.657 |  1.520 |  4.459 |
| beta[8,2,3] |  0.407 | 0.985 |  -1.544 |  0.351 |  2.360 |
| beta[8,2,4] | -0.259 | 0.338 |  -1.044 | -0.239 |  0.324 |
| beta[8,2,5] |  0.366 | 1.165 |  -1.542 |  0.302 |  3.086 |
| beta[9,1,1] | -0.567 | 3.634 |  -7.609 | -0.836 |  7.002 |
| beta[9,1,2] |  0.812 | 3.861 |  -7.079 |  1.056 |  7.889 |
| beta[9,1,3] | -0.078 | 3.796 |  -7.622 | -0.425 |  7.448 |
| beta[9,1,4] |  0.121 | 3.278 |  -6.387 |  0.321 |  5.821 |
| beta[9,1,5] |  0.089 | 3.731 |  -7.328 |  0.099 |  6.366 |
| beta[9,2,1] |  2.017 | 1.221 |  -0.442 |  1.906 |  4.441 |
| beta[9,2,2] |  1.538 | 1.012 |  -0.511 |  1.582 |  3.561 |
| beta[9,2,3] |  2.254 | 1.012 |   0.111 |  2.364 |  4.099 |
| beta[9,2,4] |  0.093 | 1.297 |  -2.678 |  0.136 |  2.218 |
| beta[9,2,5] | -2.788 | 2.323 |  -8.176 | -2.429 |  0.996 |
# ---- LEVEL 2: gamma0_0, gamma0, gamma, tau0, tau ----
tab_gamma0_0 <- summ_all[grep("^gamma0_0\\[", rownames(summ_all)),
                         c("mean", "sd", "2.5%", "50%", "97.5%")]
tab_gamma0   <- summ_all[grep("^gamma0\\[", rownames(summ_all)),
                         c("mean", "sd", "2.5%", "50%", "97.5%")]
tab_gamma    <- summ_all[grep("^gamma\\[", rownames(summ_all)),
                         c("mean", "sd", "2.5%", "50%", "97.5%")]
tab_tau0     <- summ_all[grep("^tau0\\[", rownames(summ_all)),
                         c("mean", "sd", "2.5%", "50%", "97.5%")]
tab_tau      <- summ_all[grep("^tau\\[", rownames(summ_all)),
                         c("mean", "sd", "2.5%", "50%", "97.5%")]
table_level2 <- rbind(
  data.frame(node = rownames(tab_gamma0_0), tab_gamma0_0),
  data.frame(node = rownames(tab_gamma0),   tab_gamma0),
  data.frame(node = rownames(tab_gamma),    tab_gamma),
  data.frame(node = rownames(tab_tau0),     tab_tau0),
  data.frame(node = rownames(tab_tau),      tab_tau)
)
kable(
  table_level2, digits = 3,
  caption = "Table 5. Parameter Estimates of the Hierarchical Skew-Normal Mixture Model (Level 2)"
)
Table 5. Parameter Estimates of the Hierarchical Skew-Normal Mixture Model (Level 2)

| node         |   mean |    sd |   2.5% |    50% |  97.5% |
|:-------------|-------:|------:|-------:|-------:|-------:|
| gamma0_0[1]  |  0.029 | 4.678 | -8.773 | -0.364 |  9.264 |
| gamma0_0[2]  | 43.578 | 3.571 | 31.319 | 44.308 | 45.591 |
| gamma0[1,1]  | -0.272 | 1.967 | -4.149 | -0.351 |  3.501 |
| gamma0[1,2]  | -0.040 | 1.957 | -3.853 | -0.190 |  3.906 |
| gamma0[2,1]  |  0.403 | 0.802 | -1.416 |  0.474 |  1.843 |
| gamma0[2,2]  |  3.560 | 1.153 |  0.561 |  3.784 |  5.394 |
| gamma[1,1,1] | -0.099 | 1.962 | -4.384 |  0.024 |  3.555 |
| gamma[1,1,2] | -0.319 | 1.971 | -4.193 | -0.500 |  3.698 |
| gamma[1,2,1] |  0.051 | 0.722 | -1.309 | -0.023 |  1.580 |
| gamma[1,2,2] |  1.392 | 0.791 | -0.199 |  1.466 |  2.874 |
| gamma[2,1,1] | -0.376 | 1.929 | -4.051 | -0.470 |  3.617 |
| gamma[2,1,2] |  0.441 | 1.971 | -3.620 |  0.511 |  3.969 |
| gamma[2,2,1] | -0.912 | 0.809 | -2.471 | -0.946 |  0.736 |
| gamma[2,2,2] |  0.034 | 0.919 | -1.859 |  0.090 |  1.685 |
| gamma[3,1,1] |  0.056 | 1.941 | -3.487 | -0.149 |  3.983 |
| gamma[3,1,2] | -0.117 | 1.960 | -4.037 | -0.173 |  3.557 |
| gamma[3,2,1] |  0.066 | 1.041 | -1.999 |  0.140 |  2.060 |
| gamma[3,2,2] |  0.153 | 0.862 | -1.609 |  0.204 |  1.820 |
| gamma[4,1,1] | -0.124 | 1.891 | -3.506 | -0.291 |  3.683 |
| gamma[4,1,2] |  0.079 | 1.887 | -3.632 |  0.187 |  3.403 |
| gamma[4,2,1] |  0.387 | 0.542 | -0.746 |  0.395 |  1.338 |
| gamma[4,2,2] |  0.059 | 0.715 | -1.429 |  0.066 |  1.272 |
| gamma[5,1,1] | -0.100 | 1.888 | -4.065 | -0.155 |  3.543 |
| gamma[5,1,2] | -0.095 | 1.949 | -4.069 | -0.003 |  3.531 |
| gamma[5,2,1] | -0.347 | 1.134 | -2.813 | -0.255 |  1.557 |
| gamma[5,2,2] | -1.310 | 1.100 | -3.633 | -1.187 |  0.836 |
| tau0[1]      |  0.999 | 0.984 |  0.075 |  0.694 |  3.638 |
| tau0[2]      |  1.445 | 2.473 |  0.104 |  0.727 |  9.452 |
| tau[1]       |  0.948 | 0.638 |  0.091 |  0.863 |  2.455 |
| tau[2]       |  1.690 | 0.722 |  0.401 |  1.652 |  3.307 |
| tau[3]       |  2.157 | 0.770 |  0.883 |  2.058 |  3.665 |
| tau[4]       |  0.362 | 0.344 |  0.024 |  0.236 |  1.294 |
| tau[5]       |  0.974 | 0.777 |  0.089 |  0.797 |  3.019 |
# ============================================================
# 10. MODEL FIT CRITERIA: LOO, WAIC, BIC, BAYES R^2
# ============================================================
# Pointwise log-likelihood (from the generated quantities block)
log_lik_mat <- extract_log_lik(fit_hier_mix)
# LOO & WAIC
loo_hier <- loo(log_lik_mat)
## Warning: Some Pareto k diagnostic values are too high. See help('pareto-k-diagnostic') for details.
waic_hier <- waic(log_lik_mat)
## Warning:
## 11 (2.9%) p_waic estimates greater than 0.4. We recommend trying loo instead.
loo_hier
##
## Computed from 4000 by 381 log-likelihood matrix.
##
## Estimate SE
## elpd_loo -1288.6 17.6
## p_loo 27.6 3.9
## looic 2577.2 35.3
## ------
## MCSE of elpd_loo is NA.
## MCSE and ESS estimates assume independent draws (r_eff=1).
##
## Pareto k diagnostic values:
## Count Pct. Min. ESS
## (-Inf, 0.7] (good) 368 96.6% 574
## (0.7, 1] (bad) 12 3.1% <NA>
## (1, Inf) (very bad) 1 0.3% <NA>
## See help('pareto-k-diagnostic') for details.
waic_hier
##
## Computed from 4000 by 381 log-likelihood matrix.
##
## Estimate SE
## elpd_waic -1287.2 17.5
## p_waic 26.2 3.6
## waic 2574.4 35.0
##
## 11 (2.9%) p_waic estimates greater than 0.4. We recommend trying loo instead.
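# The loo/waic results above assume independent draws (r_eff = 1). A sketch of
# the recommended refinement (not evaluated here; log_lik_arr and loo_hier2 are
# placeholder names): keep the chain structure and pass relative efficiencies
# to loo() so the SE and Pareto-k diagnostics account for autocorrelation.
# log_lik_arr <- extract_log_lik(fit_hier_mix, merge_chains = FALSE)
# r_eff       <- relative_eff(exp(log_lik_arr))
# loo_hier2   <- loo(log_lik_arr, r_eff = r_eff)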
# Approximate BIC, using p_loo as the effective number of parameters
logLik_tot <- apply(log_lik_mat, 2, mean) |> sum()
p_eff      <- loo_hier$estimates["p_loo", "Estimate"]
BIC_hier   <- -2 * logLik_tot + p_eff * log(N)
BIC_hier
## [1] 2708.4
# Bayes R^2 from the posterior predictive draws y_rep
yrep <- rstan::extract(fit_hier_mix)$y_rep  # draws x N
Bayes_R2 <- function(y, yrep) {
  mu_rep <- apply(yrep, 2, mean)
  var_f  <- var(mu_rep)
  var_r  <- mean(apply((yrep - y)^2, 1, var))
  var_f / (var_f + var_r)
}
Bayes_R2_val <- Bayes_R2(bayes_dat$Y, yrep)
Bayes_R2_val
## [1] 0.0002727588
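# Note that in Bayes_R2() above, `yrep - y` recycles y down the draws dimension
# (so residuals are not aligned with their observations) and the denominator is
# a variance of squared errors rather than a residual variance, which likely
# explains the near-zero value. A sketch of the per-draw Bayes R^2 of
# Gelman et al. (2019), using y_rep in place of the fitted means since those
# are not extracted here (not evaluated; bayes_R2_draws is a placeholder name):
# bayes_R2_draws <- apply(yrep, 1, function(y_pred) {
#   e <- bayes_dat$Y - y_pred                     # residuals for one draw
#   var(y_pred) / (var(y_pred) + var(e))
# })
# quantile(bayes_R2_draws, c(0.025, 0.5, 0.975))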
# ============================================================
# 11. TRACE PLOTS FOR KEY PARAMETERS
# ============================================================
# Convert the fit to an array (iterations x chains x parameters)
mcmc_arr <- as.array(fit_hier_mix)
# Key parameters whose traces we want to check
params_trace <- c(
  "alpha[1]", "alpha[2]",
  "sigma[1]", "sigma[2]",
  "gamma0_0[1]", "gamma0_0[2]"
)
# Main trace plots
mcmc_trace(mcmc_arr, pars = params_trace)

# Additional example: trace plots for selected beta (and gamma) coefficients
mcmc_trace(mcmc_arr, pars = c("beta[1,1,1]", "beta[1,2,1]"))

# mcmc_trace(mcmc_arr, pars = c("gamma[1,1,1]", "gamma[1,2,1]"))
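# Given the divergence and R-hat warnings, rank plots and pairs plots are a
# useful complement to trace plots for spotting chains stuck in different
# mixture modes (a sketch; needs a recent bayesplot version and is not run here):
# mcmc_rank_overlay(mcmc_arr, pars = c("alpha[1]", "alpha[2]"))
# mcmc_pairs(mcmc_arr, pars = c("alpha[1]", "sigma[1]", "tau0[1]"))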