library(tidyverse)
## ── Attaching packages ─────────────────────────────────────── tidyverse 1.3.0 ──
## ✓ ggplot2 3.3.3 ✓ purrr 0.3.4
## ✓ tibble 3.0.5 ✓ dplyr 1.0.3
## ✓ tidyr 1.1.2 ✓ stringr 1.4.0
## ✓ readr 1.4.0 ✓ forcats 0.5.0
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## x dplyr::filter() masks stats::filter()
## x dplyr::lag() masks stats::lag()
set.seed(27)
Q <- 8                                  # number of questionnaire items (P01-P08)
A <- 5                                  # number of answer categories (Likert scale 1-5)
alpha <- rep(5, A)                      # symmetric Dirichlet(5) prior over the categories
theta_0 <- gtools::rdirichlet(Q, alpha) # prior draws of the response probabilities, group 0
theta_1 <- gtools::rdirichlet(Q, alpha) # prior draws of the response probabilities, group 1
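Each row returned by gtools::rdirichlet() is a probability vector over the A = 5 answer categories, one row per question. These draws only illustrate the prior and are not used again below, since the Stan model estimates its own theta_0 and theta_1. A quick sanity check (an addition, not part of the original analysis):

# Each simulated probability vector should sum to 1
rowSums(theta_0)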
library(readxl)
# First questionnaire (keagamaan1.xlsx): 54 respondents, all with label y = 0
Angket_Mahasiswa1 <- read_excel(
  "keagamaan1.xlsx",
  col_types = rep("numeric", 9)
)
Angket_Mahasiswa1
## # A tibble: 54 x 9
## y P01 P02 P03 P04 P05 P06 P07 P08
## <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl>
## 1 0 5 5 5 5 5 5 5 5
## 2 0 5 5 5 4 5 5 5 5
## 3 0 5 5 5 5 5 5 5 5
## 4 0 5 4 4 4 3 5 5 5
## 5 0 5 5 5 3 5 5 5 5
## 6 0 4 4 4 2 2 5 5 5
## 7 0 5 5 5 5 5 5 5 5
## 8 0 5 1 5 5 1 5 5 1
## 9 0 5 5 4 4 4 5 5 4
## 10 0 4 3 4 4 3 4 5 5
## # … with 44 more rows
# Coerce the numeric columns read from Excel to integers
Angket_Mahasiswa1_convert <- type.convert(Angket_Mahasiswa1)
str(Angket_Mahasiswa1_convert)
## tibble [54 × 9] (S3: tbl_df/tbl/data.frame)
## $ y : int [1:54] 0 0 0 0 0 0 0 0 0 0 ...
## $ P01: int [1:54] 5 5 5 5 5 4 5 5 5 4 ...
## $ P02: int [1:54] 5 5 5 4 5 4 5 1 5 3 ...
## $ P03: int [1:54] 5 5 5 4 5 4 5 5 4 4 ...
## $ P04: int [1:54] 5 4 5 4 3 2 5 5 4 4 ...
## $ P05: int [1:54] 5 5 5 3 5 2 5 1 4 3 ...
## $ P06: int [1:54] 5 5 5 5 5 5 5 5 5 4 ...
## $ P07: int [1:54] 5 5 5 5 5 5 5 5 5 5 ...
## $ P08: int [1:54] 5 5 5 5 5 5 5 1 4 5 ...
library(readxl)
# Second questionnaire (keagamaan2.xlsx): 55 respondents with label y = 1, held out as new data
Angket_Mahasiswa2 <- read_excel(
  "keagamaan2.xlsx",
  col_types = rep("numeric", 9)
)
Angket_Mahasiswa2
## # A tibble: 55 x 9
## y P01 P02 P03 P04 P05 P06 P07 P08
## <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl> <dbl>
## 1 1 5 4 5 5 5 5 5 5
## 2 1 4 4 4 4 3 4 5 4
## 3 1 5 4 5 4 3 4 5 4
## 4 1 4 4 4 4 4 4 4 4
## 5 1 4 4 4 4 4 4 4 4
## 6 1 5 3 5 4 4 5 5 5
## 7 1 5 4 5 5 4 4 5 3
## 8 1 5 5 5 5 5 5 5 5
## 9 1 5 5 5 5 5 5 5 5
## 10 1 4 4 4 4 4 4 4 4
## # … with 45 more rows
# Coerce the numeric columns read from Excel to integers
Angket_Mahasiswa2_convert <- type.convert(Angket_Mahasiswa2)
str(Angket_Mahasiswa2_convert)
## tibble [55 × 9] (S3: tbl_df/tbl/data.frame)
## $ y : int [1:55] 1 1 1 1 1 1 1 1 1 1 ...
## $ P01: int [1:55] 5 4 5 4 4 5 5 5 5 4 ...
## $ P02: int [1:55] 4 4 4 4 4 3 4 5 5 4 ...
## $ P03: int [1:55] 5 4 5 4 4 5 5 5 5 4 ...
## $ P04: int [1:55] 5 4 4 4 4 4 5 5 5 4 ...
## $ P05: int [1:55] 5 3 3 4 4 4 4 5 5 4 ...
## $ P06: int [1:55] 5 4 4 4 4 5 4 5 5 4 ...
## $ P07: int [1:55] 5 5 5 4 4 5 5 5 5 4 ...
## $ P08: int [1:55] 5 4 4 4 4 5 3 5 5 4 ...
library(rstan)
## Loading required package: StanHeaders
## rstan (Version 2.21.2, GitRev: 2e1f913d3ca3)
## For execution on a local, multicore CPU with excess RAM we recommend calling
## options(mc.cores = parallel::detectCores()).
## To avoid recompilation of unchanged Stan programs, we recommend calling
## rstan_options(auto_write = TRUE)
##
## Attaching package: 'rstan'
## The following object is masked from 'package:tidyr':
##
## extract
rstan_options(auto_write = TRUE)
# Compile the Stan program (a hypothetical sketch of its contents follows below)
model <- stan_model("stan_categorical_responses.stan")
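The contents of stan_categorical_responses.stan are not reproduced in the original post. The sketch below is an assumption of what a compatible program could look like, reconstructed from the data that is passed in (R, N, y, new_R, new_N, Q, A, alpha) and from the quantities reported by the fit (theta_0, theta_1, pred, new_pred); the actual file may differ.

// Hypothetical sketch of stan_categorical_responses.stan (assumption, see note above)
data {
  int<lower=1> N;                        // number of training respondents
  int<lower=1> Q;                        // number of questionnaire items
  int<lower=2> A;                        // number of answer categories
  int<lower=1, upper=A> R[N, Q];         // training responses
  int<lower=0, upper=1> y[N];            // training group labels
  int<lower=1> new_N;                    // number of new respondents
  int<lower=1, upper=A> new_R[new_N, Q]; // new responses to classify
  vector<lower=0>[A] alpha;              // Dirichlet prior parameters
}
parameters {
  simplex[A] theta_0[Q];                 // response probabilities per item, group 0
  simplex[A] theta_1[Q];                 // response probabilities per item, group 1
}
model {
  for (q in 1:Q) {
    theta_0[q] ~ dirichlet(alpha);
    theta_1[q] ~ dirichlet(alpha);
  }
  for (n in 1:N) {
    for (q in 1:Q) {
      if (y[n] == 0)
        R[n, q] ~ categorical(theta_0[q]);
      else
        R[n, q] ~ categorical(theta_1[q]);
    }
  }
}
generated quantities {
  real pred[N];                          // P(group 0) for each training respondent
  real new_pred[new_N];                  // P(group 0) for each new respondent
  for (n in 1:N) {
    real lp0 = 0;
    real lp1 = 0;
    for (q in 1:Q) {
      lp0 += categorical_lpmf(R[n, q] | theta_0[q]);
      lp1 += categorical_lpmf(R[n, q] | theta_1[q]);
    }
    pred[n] = exp(lp0 - log_sum_exp(lp0, lp1));
  }
  for (n in 1:new_N) {
    real lp0 = 0;
    real lp1 = 0;
    for (q in 1:Q) {
      lp0 += categorical_lpmf(new_R[n, q] | theta_0[q]);
      lp1 += categorical_lpmf(new_R[n, q] | theta_1[q]);
    }
    new_pred[n] = exp(lp0 - log_sum_exp(lp0, lp1));
  }
}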
new_data <- list(
  R     = as.matrix(dplyr::select(Angket_Mahasiswa1_convert, -y)),  # training responses (54 x 8)
  N     = nrow(Angket_Mahasiswa1_convert),                          # number of training respondents
  y     = Angket_Mahasiswa1_convert$y,                              # training labels (all 0 here)
  new_R = as.matrix(dplyr::select(Angket_Mahasiswa2_convert, -y)),  # held-out responses (55 x 8)
  new_N = nrow(Angket_Mahasiswa2_convert),                          # number of held-out respondents
  Q     = Q,                                                        # number of items
  A     = A,                                                        # number of answer categories
  alpha = alpha                                                     # Dirichlet prior parameters
)
new_data
## $R
## P01 P02 P03 P04 P05 P06 P07 P08
## [1,] 5 5 5 5 5 5 5 5
## [2,] 5 5 5 4 5 5 5 5
## [3,] 5 5 5 5 5 5 5 5
## [4,] 5 4 4 4 3 5 5 5
## [5,] 5 5 5 3 5 5 5 5
## [6,] 4 4 4 2 2 5 5 5
## [7,] 5 5 5 5 5 5 5 5
## [8,] 5 1 5 5 1 5 5 1
## [9,] 5 5 4 4 4 5 5 4
## [10,] 4 3 4 4 3 4 5 5
## [11,] 5 4 5 5 5 4 5 4
## [12,] 5 5 5 5 5 5 5 5
## [13,] 5 5 5 5 5 5 5 5
## [14,] 5 4 5 3 5 5 5 4
## [15,] 5 4 5 4 3 4 5 4
## [16,] 4 4 4 4 4 4 4 4
## [17,] 5 5 5 5 5 5 5 5
## [18,] 4 3 5 4 3 4 5 4
## [19,] 4 4 4 4 4 5 4 3
## [20,] 5 3 4 3 1 1 5 3
## [21,] 4 4 4 4 4 5 5 5
## [22,] 3 5 5 3 3 3 5 5
## [23,] 4 4 4 4 4 5 5 5
## [24,] 4 4 4 5 3 4 4 4
## [25,] 5 5 5 5 5 5 5 5
## [26,] 5 5 5 4 3 5 5 5
## [27,] 5 4 5 5 5 5 5 5
## [28,] 5 5 5 5 5 5 5 5
## [29,] 4 5 5 4 4 5 5 4
## [30,] 5 3 5 4 3 4 5 5
## [31,] 4 4 5 4 4 4 4 5
## [32,] 5 4 5 5 4 5 5 5
## [33,] 4 4 5 5 5 5 5 5
## [34,] 5 4 5 4 4 5 5 5
## [35,] 4 3 4 4 5 5 5 5
## [36,] 4 3 5 5 5 5 5 4
## [37,] 5 4 4 5 4 4 5 5
## [38,] 5 4 5 4 4 5 5 4
## [39,] 3 1 3 4 3 3 4 3
## [40,] 4 3 4 4 3 4 4 4
## [41,] 5 4 5 5 5 5 5 5
## [42,] 5 5 5 5 5 5 5 5
## [43,] 4 5 5 5 4 5 5 4
## [44,] 4 4 5 4 4 5 5 4
## [45,] 4 4 4 4 4 2 4 2
## [46,] 5 5 5 5 5 5 5 5
## [47,] 5 5 5 5 5 5 5 5
## [48,] 4 4 4 4 4 4 5 4
## [49,] 5 5 5 5 5 5 5 5
## [50,] 4 4 5 4 3 4 4 4
## [51,] 5 4 5 5 5 5 5 5
## [52,] 5 5 5 5 5 5 5 5
## [53,] 5 5 5 5 5 5 5 5
## [54,] 5 4 5 4 4 4 5 5
##
## $N
## [1] 54
##
## $y
## [1] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## [39] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
##
## $new_R
## P01 P02 P03 P04 P05 P06 P07 P08
## [1,] 5 4 5 5 5 5 5 5
## [2,] 4 4 4 4 3 4 5 4
## [3,] 5 4 5 4 3 4 5 4
## [4,] 4 4 4 4 4 4 4 4
## [5,] 4 4 4 4 4 4 4 4
## [6,] 5 3 5 4 4 5 5 5
## [7,] 5 4 5 5 4 4 5 3
## [8,] 5 5 5 5 5 5 5 5
## [9,] 5 5 5 5 5 5 5 5
## [10,] 4 4 4 4 4 4 4 4
## [11,] 5 5 5 5 5 5 5 5
## [12,] 5 4 5 4 4 5 5 5
## [13,] 5 4 5 5 4 5 5 5
## [14,] 4 4 4 4 4 4 4 4
## [15,] 4 4 5 4 3 4 3 4
## [16,] 5 4 5 5 5 5 5 5
## [17,] 5 4 5 4 5 5 5 4
## [18,] 5 5 5 5 5 5 5 5
## [19,] 5 4 5 5 4 5 5 5
## [20,] 5 5 5 5 5 5 5 5
## [21,] 5 5 5 5 4 5 5 5
## [22,] 5 5 5 4 4 5 5 5
## [23,] 4 5 4 4 3 3 5 4
## [24,] 5 5 4 5 4 5 5 5
## [25,] 4 4 4 4 4 4 4 4
## [26,] 5 4 5 4 4 5 5 4
## [27,] 3 3 4 3 3 4 4 3
## [28,] 4 4 4 4 4 5 5 5
## [29,] 5 5 5 5 5 5 5 5
## [30,] 5 5 5 5 5 4 5 5
## [31,] 5 4 5 5 5 5 5 5
## [32,] 5 5 5 5 4 5 5 5
## [33,] 5 5 5 5 5 5 5 5
## [34,] 4 4 4 4 4 4 4 4
## [35,] 4 4 5 4 4 4 4 4
## [36,] 4 4 5 4 4 5 5 5
## [37,] 5 4 5 4 3 5 5 5
## [38,] 5 4 5 5 5 5 5 5
## [39,] 4 4 5 4 4 5 5 5
## [40,] 4 4 5 5 5 5 5 5
## [41,] 5 4 5 3 3 5 5 5
## [42,] 5 5 5 5 5 5 5 5
## [43,] 5 4 5 5 4 5 5 5
## [44,] 4 3 5 5 5 5 4 4
## [45,] 5 4 5 4 4 5 5 5
## [46,] 3 3 3 3 2 3 3 3
## [47,] 4 3 4 4 3 4 4 4
## [48,] 5 4 5 5 5 5 5 5
## [49,] 5 5 5 5 5 5 5 5
## [50,] 5 4 5 5 5 5 5 5
## [51,] 5 3 5 4 5 4 5 5
## [52,] 5 5 5 5 5 4 5 5
## [53,] 5 5 5 5 5 5 5 5
## [54,] 5 5 5 5 5 5 5 5
## [55,] 4 4 5 4 3 4 5 4
##
## $new_N
## [1] 55
##
## $Q
## [1] 8
##
## $A
## [1] 5
##
## $alpha
## [1] 5 5 5 5 5
# Draw posterior samples: 8 chains of 1000 iterations (500 warmup) each
fit <- sampling(model, data = new_data, cores = 3, iter = 1000, chains = 8, refresh = 0)
fit
## Inference for Stan model: stan_categorical_responses.
## 8 chains, each with iter=1000; warmup=500; thin=1;
## post-warmup draws per chain=500, total post-warmup draws=4000.
##
## mean se_mean sd 2.5% 25% 50% 75% 97.5%
## theta_0[1,1] 0.06 0.00 0.03 0.02 0.04 0.06 0.08 0.12
## theta_0[1,2] 0.06 0.00 0.03 0.02 0.04 0.06 0.08 0.12
## theta_0[1,3] 0.09 0.00 0.03 0.03 0.06 0.08 0.11 0.16
## theta_0[1,4] 0.30 0.00 0.05 0.21 0.27 0.30 0.34 0.41
## theta_0[1,5] 0.48 0.00 0.06 0.37 0.44 0.48 0.52 0.59
## theta_0[2,1] 0.09 0.00 0.03 0.04 0.07 0.08 0.11 0.16
## theta_0[2,2] 0.06 0.00 0.03 0.02 0.04 0.06 0.08 0.13
## theta_0[2,3] 0.15 0.00 0.04 0.08 0.12 0.15 0.18 0.23
## theta_0[2,4] 0.37 0.00 0.05 0.27 0.33 0.37 0.40 0.48
## theta_0[2,5] 0.33 0.00 0.05 0.23 0.29 0.33 0.37 0.44
## theta_0[3,1] 0.06 0.00 0.03 0.02 0.04 0.06 0.08 0.13
## theta_0[3,2] 0.06 0.00 0.03 0.02 0.04 0.06 0.08 0.13
## theta_0[3,3] 0.08 0.00 0.03 0.03 0.05 0.07 0.09 0.14
## theta_0[3,4] 0.25 0.00 0.05 0.16 0.22 0.25 0.28 0.35
## theta_0[3,5] 0.54 0.00 0.06 0.43 0.51 0.54 0.58 0.65
## theta_0[4,1] 0.06 0.00 0.03 0.02 0.04 0.06 0.08 0.12
## theta_0[4,2] 0.08 0.00 0.03 0.03 0.05 0.07 0.09 0.15
## theta_0[4,3] 0.11 0.00 0.04 0.05 0.09 0.11 0.14 0.19
## theta_0[4,4] 0.37 0.00 0.05 0.27 0.33 0.37 0.40 0.48
## theta_0[4,5] 0.38 0.00 0.05 0.27 0.34 0.38 0.41 0.49
## theta_0[5,1] 0.09 0.00 0.03 0.04 0.07 0.09 0.11 0.16
## theta_0[5,2] 0.08 0.00 0.03 0.03 0.05 0.07 0.09 0.14
## theta_0[5,3] 0.20 0.00 0.05 0.12 0.17 0.20 0.23 0.30
## theta_0[5,4] 0.27 0.00 0.05 0.17 0.23 0.26 0.30 0.37
## theta_0[5,5] 0.37 0.00 0.05 0.27 0.33 0.37 0.40 0.47
## theta_0[6,1] 0.08 0.00 0.03 0.03 0.06 0.07 0.09 0.14
## theta_0[6,2] 0.08 0.00 0.03 0.03 0.05 0.07 0.09 0.14
## theta_0[6,3] 0.09 0.00 0.03 0.04 0.07 0.09 0.11 0.16
## theta_0[6,4] 0.23 0.00 0.05 0.14 0.19 0.22 0.26 0.33
## theta_0[6,5] 0.53 0.00 0.06 0.42 0.49 0.53 0.57 0.64
## theta_0[7,1] 0.06 0.00 0.03 0.02 0.04 0.06 0.08 0.13
## theta_0[7,2] 0.06 0.00 0.03 0.02 0.04 0.06 0.08 0.13
## theta_0[7,3] 0.06 0.00 0.03 0.02 0.04 0.06 0.08 0.13
## theta_0[7,4] 0.16 0.00 0.04 0.09 0.14 0.16 0.19 0.25
## theta_0[7,5] 0.65 0.00 0.05 0.54 0.61 0.65 0.68 0.75
## theta_0[8,1] 0.08 0.00 0.03 0.03 0.05 0.07 0.09 0.14
## theta_0[8,2] 0.08 0.00 0.03 0.03 0.05 0.07 0.09 0.14
## theta_0[8,3] 0.10 0.00 0.03 0.04 0.08 0.10 0.12 0.18
## theta_0[8,4] 0.25 0.00 0.05 0.16 0.22 0.25 0.28 0.35
## theta_0[8,5] 0.49 0.00 0.06 0.38 0.46 0.49 0.53 0.60
## theta_1[1,1] 0.20 0.00 0.08 0.07 0.14 0.20 0.25 0.38
## theta_1[1,2] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[1,3] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.38
## theta_1[1,4] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[1,5] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.38
## theta_1[2,1] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[2,2] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.39
## theta_1[2,3] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[2,4] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.38
## theta_1[2,5] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.38
## theta_1[3,1] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[3,2] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.38
## theta_1[3,3] 0.20 0.00 0.08 0.08 0.14 0.19 0.25 0.37
## theta_1[3,4] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.38
## theta_1[3,5] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.38
## theta_1[4,1] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.38
## theta_1[4,2] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.38
## theta_1[4,3] 0.20 0.00 0.08 0.08 0.14 0.19 0.25 0.37
## theta_1[4,4] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.38
## theta_1[4,5] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.38
## theta_1[5,1] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[5,2] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.38
## theta_1[5,3] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[5,4] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[5,5] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[6,1] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[6,2] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[6,3] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[6,4] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[6,5] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[7,1] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.38
## theta_1[7,2] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[7,3] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[7,4] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.38
## theta_1[7,5] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[8,1] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[8,2] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[8,3] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[8,4] 0.20 0.00 0.08 0.07 0.14 0.19 0.25 0.37
## theta_1[8,5] 0.20 0.00 0.08 0.07 0.14 0.19 0.24 0.38
## pred[1] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[2] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[3] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[4] 0.99 0.00 0.01 0.97 0.99 1.00 1.00 1.00
## pred[5] 0.99 0.00 0.01 0.97 0.99 1.00 1.00 1.00
## pred[6] 0.90 0.00 0.12 0.55 0.87 0.94 0.98 1.00
## pred[7] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[8] 0.87 0.00 0.14 0.45 0.83 0.92 0.97 1.00
## pred[9] 0.99 0.00 0.01 0.96 0.99 1.00 1.00 1.00
## pred[10] 0.95 0.00 0.06 0.78 0.94 0.98 0.99 1.00
## pred[11] 0.99 0.00 0.01 0.97 0.99 1.00 1.00 1.00
## pred[12] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[13] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[14] 0.99 0.00 0.01 0.95 0.99 1.00 1.00 1.00
## pred[15] 0.99 0.00 0.02 0.94 0.99 0.99 1.00 1.00
## pred[16] 0.90 0.00 0.11 0.58 0.88 0.94 0.98 1.00
## pred[17] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[18] 0.96 0.00 0.06 0.80 0.95 0.98 0.99 1.00
## pred[19] 0.89 0.00 0.12 0.54 0.86 0.94 0.97 1.00
## pred[20] 0.38 0.00 0.25 0.03 0.16 0.34 0.58 0.90
## pred[21] 0.99 0.00 0.01 0.97 0.99 1.00 1.00 1.00
## pred[22] 0.79 0.00 0.19 0.28 0.69 0.86 0.94 0.99
## pred[23] 0.99 0.00 0.01 0.97 0.99 1.00 1.00 1.00
## pred[24] 0.88 0.00 0.12 0.55 0.84 0.93 0.97 0.99
## pred[25] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[26] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[27] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[28] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[29] 0.99 0.00 0.01 0.97 0.99 1.00 1.00 1.00
## pred[30] 0.99 0.00 0.02 0.93 0.98 0.99 1.00 1.00
## pred[31] 0.97 0.00 0.04 0.86 0.97 0.99 0.99 1.00
## pred[32] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[33] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[34] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[35] 0.99 0.00 0.02 0.94 0.99 0.99 1.00 1.00
## pred[36] 0.99 0.00 0.01 0.95 0.99 1.00 1.00 1.00
## pred[37] 0.99 0.00 0.01 0.95 0.99 1.00 1.00 1.00
## pred[38] 1.00 0.00 0.01 0.98 1.00 1.00 1.00 1.00
## pred[39] 0.07 0.00 0.11 0.00 0.01 0.03 0.08 0.40
## pred[40] 0.78 0.00 0.19 0.28 0.68 0.83 0.93 0.99
## pred[41] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[42] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[43] 0.99 0.00 0.01 0.97 0.99 1.00 1.00 1.00
## pred[44] 0.99 0.00 0.01 0.97 0.99 1.00 1.00 1.00
## pred[45] 0.57 0.00 0.25 0.09 0.37 0.60 0.79 0.96
## pred[46] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[47] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[48] 0.97 0.00 0.04 0.85 0.97 0.99 0.99 1.00
## pred[49] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[50] 0.94 0.00 0.08 0.71 0.92 0.96 0.99 1.00
## pred[51] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[52] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[53] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## pred[54] 1.00 0.00 0.01 0.98 1.00 1.00 1.00 1.00
## new_pred[1] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[2] 0.96 0.00 0.05 0.82 0.96 0.98 0.99 1.00
## new_pred[3] 0.99 0.00 0.02 0.94 0.99 0.99 1.00 1.00
## new_pred[4] 0.90 0.00 0.11 0.58 0.88 0.94 0.98 1.00
## new_pred[5] 0.90 0.00 0.11 0.58 0.88 0.94 0.98 1.00
## new_pred[6] 1.00 0.00 0.01 0.98 0.99 1.00 1.00 1.00
## new_pred[7] 0.98 0.00 0.03 0.89 0.98 0.99 1.00 1.00
## new_pred[8] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[9] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[10] 0.90 0.00 0.11 0.58 0.88 0.94 0.98 1.00
## new_pred[11] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[12] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[13] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[14] 0.90 0.00 0.11 0.58 0.88 0.94 0.98 1.00
## new_pred[15] 0.86 0.00 0.15 0.44 0.80 0.91 0.96 0.99
## new_pred[16] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[17] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[18] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[19] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[20] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[21] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[22] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[23] 0.90 0.00 0.11 0.60 0.88 0.94 0.98 1.00
## new_pred[24] 1.00 0.00 0.01 0.98 0.99 1.00 1.00 1.00
## new_pred[25] 0.90 0.00 0.11 0.58 0.88 0.94 0.98 1.00
## new_pred[26] 1.00 0.00 0.01 0.98 1.00 1.00 1.00 1.00
## new_pred[27] 0.21 0.00 0.20 0.01 0.06 0.14 0.30 0.76
## new_pred[28] 0.99 0.00 0.01 0.97 0.99 1.00 1.00 1.00
## new_pred[29] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[30] 1.00 0.00 0.01 0.98 1.00 1.00 1.00 1.00
## new_pred[31] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[32] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[33] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[34] 0.90 0.00 0.11 0.58 0.88 0.94 0.98 1.00
## new_pred[35] 0.95 0.00 0.07 0.76 0.94 0.97 0.99 1.00
## new_pred[36] 1.00 0.00 0.01 0.99 1.00 1.00 1.00 1.00
## new_pred[37] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[38] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[39] 1.00 0.00 0.01 0.99 1.00 1.00 1.00 1.00
## new_pred[40] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[41] 0.99 0.00 0.01 0.96 0.99 1.00 1.00 1.00
## new_pred[42] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[43] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[44] 0.96 0.00 0.05 0.82 0.95 0.98 0.99 1.00
## new_pred[45] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[46] 0.01 0.00 0.02 0.00 0.00 0.00 0.01 0.05
## new_pred[47] 0.78 0.00 0.19 0.28 0.68 0.83 0.93 0.99
## new_pred[48] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[49] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[50] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[51] 0.99 0.00 0.01 0.96 0.99 1.00 1.00 1.00
## new_pred[52] 1.00 0.00 0.01 0.98 1.00 1.00 1.00 1.00
## new_pred[53] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[54] 1.00 0.00 0.00 0.99 1.00 1.00 1.00 1.00
## new_pred[55] 0.98 0.00 0.03 0.91 0.98 0.99 1.00 1.00
## lp__ -1178.05 0.15 5.77 -1190.19 -1181.76 -1177.67 -1174.14 -1167.74
## n_eff Rhat
## theta_0[1,1] 5538 1
## theta_0[1,2] 5588 1
## theta_0[1,3] 5377 1
## theta_0[1,4] 5377 1
## theta_0[1,5] 6130 1
## theta_0[2,1] 5099 1
## theta_0[2,2] 5133 1
## theta_0[2,3] 5239 1
## theta_0[2,4] 5057 1
## theta_0[2,5] 5304 1
## theta_0[3,1] 5751 1
## theta_0[3,2] 4828 1
## theta_0[3,3] 5536 1
## theta_0[3,4] 5100 1
## theta_0[3,5] 5182 1
## theta_0[4,1] 5746 1
## theta_0[4,2] 5339 1
## theta_0[4,3] 5369 1
## theta_0[4,4] 6325 1
## theta_0[4,5] 5999 1
## theta_0[5,1] 5056 1
## theta_0[5,2] 4519 1
## theta_0[5,3] 5073 1
## theta_0[5,4] 5266 1
## theta_0[5,5] 5357 1
## theta_0[6,1] 5726 1
## theta_0[6,2] 5752 1
## theta_0[6,3] 5965 1
## theta_0[6,4] 6056 1
## theta_0[6,5] 6237 1
## theta_0[7,1] 6155 1
## theta_0[7,2] 5294 1
## theta_0[7,3] 5724 1
## theta_0[7,4] 5160 1
## theta_0[7,5] 5204 1
## theta_0[8,1] 5360 1
## theta_0[8,2] 5695 1
## theta_0[8,3] 5157 1
## theta_0[8,4] 5401 1
## theta_0[8,5] 5728 1
## theta_1[1,1] 5907 1
## theta_1[1,2] 5421 1
## theta_1[1,3] 5830 1
## theta_1[1,4] 5006 1
## theta_1[1,5] 4951 1
## theta_1[2,1] 5010 1
## theta_1[2,2] 6754 1
## theta_1[2,3] 4920 1
## theta_1[2,4] 4384 1
## theta_1[2,5] 5459 1
## theta_1[3,1] 5618 1
## theta_1[3,2] 4672 1
## theta_1[3,3] 5295 1
## theta_1[3,4] 5201 1
## theta_1[3,5] 5303 1
## theta_1[4,1] 4830 1
## theta_1[4,2] 6199 1
## theta_1[4,3] 5004 1
## theta_1[4,4] 4704 1
## theta_1[4,5] 5445 1
## theta_1[5,1] 5536 1
## theta_1[5,2] 6131 1
## theta_1[5,3] 5096 1
## theta_1[5,4] 4959 1
## theta_1[5,5] 4923 1
## theta_1[6,1] 5527 1
## theta_1[6,2] 4758 1
## theta_1[6,3] 4551 1
## theta_1[6,4] 5623 1
## theta_1[6,5] 5796 1
## theta_1[7,1] 5495 1
## theta_1[7,2] 5318 1
## theta_1[7,3] 5724 1
## theta_1[7,4] 6491 1
## theta_1[7,5] 5045 1
## theta_1[8,1] 5330 1
## theta_1[8,2] 6201 1
## theta_1[8,3] 5333 1
## theta_1[8,4] 4932 1
## theta_1[8,5] 4887 1
## pred[1] 3882 1
## pred[2] 3602 1
## pred[3] 3882 1
## pred[4] 3019 1
## pred[5] 3598 1
## pred[6] 3520 1
## pred[7] 3882 1
## pred[8] 3982 1
## pred[9] 3578 1
## pred[10] 3561 1
## pred[11] 3032 1
## pred[12] 3882 1
## pred[13] 3882 1
## pred[14] 3149 1
## pred[15] 3066 1
## pred[16] 3685 1
## pred[17] 3882 1
## pred[18] 3447 1
## pred[19] 3553 1
## pred[20] 4261 1
## pred[21] 3155 1
## pred[22] 4312 1
## pred[23] 3155 1
## pred[24] 4190 1
## pred[25] 3882 1
## pred[26] 3551 1
## pred[27] 3486 1
## pred[28] 3882 1
## pred[29] 3830 1
## pred[30] 3220 1
## pred[31] 3665 1
## pred[32] 3072 1
## pred[33] 3406 1
## pred[34] 2993 1
## pred[35] 3039 1
## pred[36] 3350 1
## pred[37] 3656 1
## pred[38] 2662 1
## pred[39] 3682 1
## pred[40] 4689 1
## pred[41] 3486 1
## pred[42] 3882 1
## pred[43] 2625 1
## pred[44] 2494 1
## pred[45] 4872 1
## pred[46] 3882 1
## pred[47] 3882 1
## pred[48] 3013 1
## pred[49] 3882 1
## pred[50] 3776 1
## pred[51] 3486 1
## pred[52] 3882 1
## pred[53] 3882 1
## pred[54] 3391 1
## new_pred[1] 3486 1
## new_pred[2] 3035 1
## new_pred[3] 3066 1
## new_pred[4] 3685 1
## new_pred[5] 3685 1
## new_pred[6] 3206 1
## new_pred[7] 3260 1
## new_pred[8] 3882 1
## new_pred[9] 3882 1
## new_pred[10] 3685 1
## new_pred[11] 3882 1
## new_pred[12] 2993 1
## new_pred[13] 3072 1
## new_pred[14] 3685 1
## new_pred[15] 3827 1
## new_pred[16] 3486 1
## new_pred[17] 3558 1
## new_pred[18] 3882 1
## new_pred[19] 3072 1
## new_pred[20] 3882 1
## new_pred[21] 3013 1
## new_pred[22] 3315 1
## new_pred[23] 3809 1
## new_pred[24] 3042 1
## new_pred[25] 3685 1
## new_pred[26] 2662 1
## new_pred[27] 3819 1
## new_pred[28] 3155 1
## new_pred[29] 3882 1
## new_pred[30] 3453 1
## new_pred[31] 3486 1
## new_pred[32] 3013 1
## new_pred[33] 3882 1
## new_pred[34] 3685 1
## new_pred[35] 3659 1
## new_pred[36] 3197 1
## new_pred[37] 3442 1
## new_pred[38] 3486 1
## new_pred[39] 3197 1
## new_pred[40] 3406 1
## new_pred[41] 3156 1
## new_pred[42] 3882 1
## new_pred[43] 3072 1
## new_pred[44] 3722 1
## new_pred[45] 2993 1
## new_pred[46] 3290 1
## new_pred[47] 4689 1
## new_pred[48] 3486 1
## new_pred[49] 3882 1
## new_pred[50] 3486 1
## new_pred[51] 2970 1
## new_pred[52] 3453 1
## new_pred[53] 3882 1
## new_pred[54] 3882 1
## new_pred[55] 3098 1
## lp__ 1567 1
##
## Samples were drawn using NUTS(diag_e) at Mon Feb 1 12:17:26 2021.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
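The summary above shows Rhat = 1 and large effective sample sizes for every quantity. As an optional extra check (an addition, not part of the original analysis), rstan's built-in HMC diagnostics can be run on the same fit:

# Optional extra diagnostics (added for illustration): divergences, tree depth, E-BFMI
check_hmc_diagnostics(fit)
# Largest split-Rhat across all quantities; values near 1 indicate convergence
max(summary(fit)$summary[, "Rhat"], na.rm = TRUE)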
traceplot(fit)
## 'pars' not specified. Showing first 10 parameters by default.
library(bayesplot)
## This is bayesplot version 1.8.0
## - Online documentation and vignettes at mc-stan.org/bayesplot
## - bayesplot theme set to bayesplot::theme_default()
## * Does _not_ affect other ggplot2 plots
## * See ?bayesplot_theme_set for details on theme setting
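bayesplot is loaded here but not used again in this write-up. As a small optional illustration (an addition, not part of the original analysis), its interval plot gives a compact view of the theta_0 posteriors:

# Optional (added for illustration): posterior intervals for all theta_0 entries
mcmc_intervals(as.array(fit), regex_pars = "^theta_0")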
library(tidybayes)
print(fit, pars = "theta_0")
## Inference for Stan model: stan_categorical_responses.
## 8 chains, each with iter=1000; warmup=500; thin=1;
## post-warmup draws per chain=500, total post-warmup draws=4000.
##
## mean se_mean sd 2.5% 25% 50% 75% 97.5% n_eff Rhat
## theta_0[1,1] 0.06 0 0.03 0.02 0.04 0.06 0.08 0.12 5538 1
## theta_0[1,2] 0.06 0 0.03 0.02 0.04 0.06 0.08 0.12 5588 1
## theta_0[1,3] 0.09 0 0.03 0.03 0.06 0.08 0.11 0.16 5377 1
## theta_0[1,4] 0.30 0 0.05 0.21 0.27 0.30 0.34 0.41 5377 1
## theta_0[1,5] 0.48 0 0.06 0.37 0.44 0.48 0.52 0.59 6130 1
## theta_0[2,1] 0.09 0 0.03 0.04 0.07 0.08 0.11 0.16 5099 1
## theta_0[2,2] 0.06 0 0.03 0.02 0.04 0.06 0.08 0.13 5133 1
## theta_0[2,3] 0.15 0 0.04 0.08 0.12 0.15 0.18 0.23 5239 1
## theta_0[2,4] 0.37 0 0.05 0.27 0.33 0.37 0.40 0.48 5057 1
## theta_0[2,5] 0.33 0 0.05 0.23 0.29 0.33 0.37 0.44 5304 1
## theta_0[3,1] 0.06 0 0.03 0.02 0.04 0.06 0.08 0.13 5751 1
## theta_0[3,2] 0.06 0 0.03 0.02 0.04 0.06 0.08 0.13 4828 1
## theta_0[3,3] 0.08 0 0.03 0.03 0.05 0.07 0.09 0.14 5536 1
## theta_0[3,4] 0.25 0 0.05 0.16 0.22 0.25 0.28 0.35 5100 1
## theta_0[3,5] 0.54 0 0.06 0.43 0.51 0.54 0.58 0.65 5182 1
## theta_0[4,1] 0.06 0 0.03 0.02 0.04 0.06 0.08 0.12 5746 1
## theta_0[4,2] 0.08 0 0.03 0.03 0.05 0.07 0.09 0.15 5339 1
## theta_0[4,3] 0.11 0 0.04 0.05 0.09 0.11 0.14 0.19 5369 1
## theta_0[4,4] 0.37 0 0.05 0.27 0.33 0.37 0.40 0.48 6325 1
## theta_0[4,5] 0.38 0 0.05 0.27 0.34 0.38 0.41 0.49 5999 1
## theta_0[5,1] 0.09 0 0.03 0.04 0.07 0.09 0.11 0.16 5056 1
## theta_0[5,2] 0.08 0 0.03 0.03 0.05 0.07 0.09 0.14 4519 1
## theta_0[5,3] 0.20 0 0.05 0.12 0.17 0.20 0.23 0.30 5073 1
## theta_0[5,4] 0.27 0 0.05 0.17 0.23 0.26 0.30 0.37 5266 1
## theta_0[5,5] 0.37 0 0.05 0.27 0.33 0.37 0.40 0.47 5357 1
## theta_0[6,1] 0.08 0 0.03 0.03 0.06 0.07 0.09 0.14 5726 1
## theta_0[6,2] 0.08 0 0.03 0.03 0.05 0.07 0.09 0.14 5752 1
## theta_0[6,3] 0.09 0 0.03 0.04 0.07 0.09 0.11 0.16 5965 1
## theta_0[6,4] 0.23 0 0.05 0.14 0.19 0.22 0.26 0.33 6056 1
## theta_0[6,5] 0.53 0 0.06 0.42 0.49 0.53 0.57 0.64 6237 1
## theta_0[7,1] 0.06 0 0.03 0.02 0.04 0.06 0.08 0.13 6155 1
## theta_0[7,2] 0.06 0 0.03 0.02 0.04 0.06 0.08 0.13 5294 1
## theta_0[7,3] 0.06 0 0.03 0.02 0.04 0.06 0.08 0.13 5724 1
## theta_0[7,4] 0.16 0 0.04 0.09 0.14 0.16 0.19 0.25 5160 1
## theta_0[7,5] 0.65 0 0.05 0.54 0.61 0.65 0.68 0.75 5204 1
## theta_0[8,1] 0.08 0 0.03 0.03 0.05 0.07 0.09 0.14 5360 1
## theta_0[8,2] 0.08 0 0.03 0.03 0.05 0.07 0.09 0.14 5695 1
## theta_0[8,3] 0.10 0 0.03 0.04 0.08 0.10 0.12 0.18 5157 1
## theta_0[8,4] 0.25 0 0.05 0.16 0.22 0.25 0.28 0.35 5401 1
## theta_0[8,5] 0.49 0 0.06 0.38 0.46 0.49 0.53 0.60 5728 1
##
## Samples were drawn using NUTS(diag_e) at Mon Feb 1 12:17:26 2021.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
print(fit, pars = "theta_1")
## Inference for Stan model: stan_categorical_responses.
## 8 chains, each with iter=1000; warmup=500; thin=1;
## post-warmup draws per chain=500, total post-warmup draws=4000.
##
## mean se_mean sd 2.5% 25% 50% 75% 97.5% n_eff Rhat
## theta_1[1,1] 0.2 0 0.08 0.07 0.14 0.20 0.25 0.38 5907 1
## theta_1[1,2] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 5421 1
## theta_1[1,3] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.38 5830 1
## theta_1[1,4] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 5006 1
## theta_1[1,5] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.38 4951 1
## theta_1[2,1] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 5010 1
## theta_1[2,2] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.39 6754 1
## theta_1[2,3] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 4920 1
## theta_1[2,4] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.38 4384 1
## theta_1[2,5] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.38 5459 1
## theta_1[3,1] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 5618 1
## theta_1[3,2] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.38 4672 1
## theta_1[3,3] 0.2 0 0.08 0.08 0.14 0.19 0.25 0.37 5295 1
## theta_1[3,4] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.38 5201 1
## theta_1[3,5] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.38 5303 1
## theta_1[4,1] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.38 4830 1
## theta_1[4,2] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.38 6199 1
## theta_1[4,3] 0.2 0 0.08 0.08 0.14 0.19 0.25 0.37 5004 1
## theta_1[4,4] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.38 4704 1
## theta_1[4,5] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.38 5445 1
## theta_1[5,1] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 5536 1
## theta_1[5,2] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.38 6131 1
## theta_1[5,3] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 5096 1
## theta_1[5,4] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 4959 1
## theta_1[5,5] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 4923 1
## theta_1[6,1] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 5527 1
## theta_1[6,2] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 4758 1
## theta_1[6,3] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 4551 1
## theta_1[6,4] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 5623 1
## theta_1[6,5] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 5796 1
## theta_1[7,1] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.38 5495 1
## theta_1[7,2] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 5318 1
## theta_1[7,3] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 5724 1
## theta_1[7,4] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.38 6491 1
## theta_1[7,5] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 5045 1
## theta_1[8,1] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 5330 1
## theta_1[8,2] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 6201 1
## theta_1[8,3] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 5333 1
## theta_1[8,4] 0.2 0 0.08 0.07 0.14 0.19 0.25 0.37 4932 1
## theta_1[8,5] 0.2 0 0.08 0.07 0.14 0.19 0.24 0.38 4887 1
##
## Samples were drawn using NUTS(diag_e) at Mon Feb 1 12:17:26 2021.
## For each parameter, n_eff is a crude measure of effective sample size,
## and Rhat is the potential scale reduction factor on split chains (at
## convergence, Rhat=1).
# Summarise the posterior draws of new_pred for each new respondent; new_pred appears to be
# the posterior probability of belonging to group 0, so values above 0.5 are mapped to class 0
pred <- fit %>%
  spread_draws(new_pred[i]) %>%
  median_qi(new_pred[i]) %>%
  mutate(.pred = if_else(`new_pred[i]` > 0.5, 0, 1))
# Share of point predictions that match the held-out labels
acc <- round(mean(pred$.pred == Angket_Mahasiswa2_convert$y), 2)
cat("The classification accuracy is:", acc)
## The classification accuracy is: 0
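An accuracy of 0 is not surprising here: the training data contain only y = 0 respondents, so theta_1 never moves away from its Dirichlet(5) prior (all of its posterior means equal 0.20 above), essentially every new respondent is assigned to group 0, and all of the held-out labels are 1. A cross-tabulation (added here for illustration) makes this explicit:

# Confusion table of point predictions vs. held-out labels (added for illustration)
table(predicted = pred$.pred, actual = Angket_Mahasiswa2_convert$y)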
# Long-format posterior draws of the per-item response probabilities for each group
theta_0_draws <- fit %>%
  spread_draws(theta_0[i, j])
theta_1_draws <- fit %>%
  spread_draws(theta_1[i, j])
theta_draws <- theta_0_draws %>%
  left_join(theta_1_draws)
## Joining, by = c("i", "j", ".chain", ".iteration", ".draw")
theta_draws %>%
  gather(group, theta, theta_0, theta_1) %>%
  mutate(
    group = if_else(group == "theta_0", "Group 0", "Group 1"),
    question = i,
    response = j
  ) %>%
  ggplot(aes(theta, fill = group, color = group)) +
  geom_density(alpha = 0.5) +
  facet_grid(
    rows = vars(question),
    cols = vars(response)
  ) +
  labs(
    title = "Estimated probability of each response by question and group",
    subtitle = "Columns correspond to response, rows to questions",
    x = "Probability of response"
  ) +
  theme(
    legend.position = "none",
    axis.text.y = element_blank(),
    axis.ticks.y = element_blank(),
    axis.text.x = element_text(angle = 45, vjust = 1, hjust = 1)
  ) +
  theme(
    panel.grid = element_blank(),
    panel.background = element_rect(fill = "white")
  )
# Optional: explore the fit interactively in a browser
shinystan::launch_shinystan(fit)
##
## Launching ShinyStan interface... for large models this may take some time.
## Loading required package: shiny
##
## Listening on http://127.0.0.1:5014
Reference: https://rpubs.com/suhartono-uinmaliki/respon