setwd("/Users/koyo/Dropbox/000078_CSKAKEN/04_NRT/SY2018")
nrt.all <- read.csv("nrt.csv", fileEncoding = "Shift_JIS")
setwd("/Users/koyo/Dropbox/000078_CSKAKEN/190700_Anal")
library(dplyr)
nrt.shak <- nrt.all %>%
  dplyr::select(c("renban", "sho.sid",
                  "shak.ss.1415",
                  "shak.ss.1516",
                  "shak.ss.1617",
                  "shak.ss.1718"  # grades 6-7 added
                  ))
colnames(nrt.shak) <- c("renban", "sid",
"g34.shak",
"g45.shak",
"g56.shak",
"g67.shak"
)
nrt.shak <- nrt.shak %>%
  mutate(id = row_number())
# write.csv(nrt.shak, "nrt_shak.csv")
library(psych)
g34.ds <- describe(nrt.shak$g34.shak)
g45.ds <- describe(nrt.shak$g45.shak)
g56.ds <- describe(nrt.shak$g56.shak)
g67.ds <- describe(nrt.shak$g67.shak)
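# Optional sketch (not part of the original analysis): stack the four
# describe() outputs into one table for side-by-side comparison.
shak.ds <- rbind(g34.ds, g45.ds, g56.ds, g67.ds)
rownames(shak.ds) <- c("g34", "g45", "g56", "g67")
# shak.ds[, c("n", "mean", "sd")]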
# Mean differences and variance ratios between adjacent grade pairs
d45 <- matrix(c(g45.ds[,3] - g34.ds[,3], g45.ds[,4]^2 / g34.ds[,4]^2), nrow=1, ncol=2)
d56 <- matrix(c(g56.ds[,3] - g45.ds[,3], g56.ds[,4]^2 / g45.ds[,4]^2), nrow=1, ncol=2)
d67 <- matrix(c(g67.ds[,3] - g56.ds[,3], g67.ds[,4]^2 / g56.ds[,4]^2), nrow=1, ncol=2)
diff.1 <- rbind(d45, d56, d67)
colnames(diff.1) <- c("M.Diff", "V.Ratio")
rownames(diff.1) <- c("d45", "d56", "d67")
diff.1
## M.Diff V.Ratio
## d45 -0.5099369 0.8925809
## d56 0.1868797 1.0660894
## d67 0.3005566 1.0490981
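# Cross-check (a sketch, not in the original): the same table computed in
# vectorized form directly from the describe() output.
mm <- c(g34.ds$mean, g45.ds$mean, g56.ds$mean, g67.ds$mean)
vv <- c(g34.ds$sd, g45.ds$sd, g56.ds$sd, g67.ds$sd)^2
cbind(M.Diff = diff(mm), V.Ratio = vv[-1] / vv[-4])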
sd4 <- g34.ds[,4]
sd5 <- g45.ds[,4]
sd6 <- g56.ds[,4]
sd7 <- g67.ds[,4]
sd.1 <- matrix(c(sd4, sd5, sd6, sd7), nrow=4, ncol=1)
colnames(sd.1) <- c("sd")
rownames(sd.1) <- c("3-4","4-5","5-6","6-7")
sd.1
## sd
## 3-4 10.85233
## 4-5 10.25291
## 5-6 10.58629
## 6-7 10.84306
sd.ratio45 <- sd.1[2] / sd.1[1]
sd.ratio56 <- sd.1[3] / sd.1[2]
sd.ratio67 <- sd.1[4] / sd.1[3]
sd.ratio1 <- matrix(c(sd.ratio45, sd.ratio56, sd.ratio67), nrow=3, ncol=1)
colnames(sd.ratio1) <- c("sd.ratio")
rownames(sd.ratio1) <- c("4-5", "5-6", "6-7")
sd.ratio1
## sd.ratio
## 4-5 0.944765
## 5-6 1.032516
## 6-7 1.024255
# School-level mean and SD of the scaled scores for each grade pair
g34.mean <- nrt.shak[c("sid", "g34.shak")] %>% na.omit() %>% group_by(sid) %>% summarise(avg.34 = mean(g34.shak))
g34.sd <- nrt.shak[c("sid", "g34.shak")] %>% na.omit() %>% group_by(sid) %>% summarise(sd.34 = sd(g34.shak))
g34.msd <- data.frame(dplyr::inner_join(g34.mean, g34.sd, by = "sid"))
g45.mean <- nrt.shak[c("sid", "g45.shak")] %>% na.omit() %>% group_by(sid) %>% summarise(avg.45 = mean(g45.shak))
g45.sd <- nrt.shak[c("sid", "g45.shak")] %>% na.omit() %>% group_by(sid) %>% summarise(sd.45 = sd(g45.shak))
g45.msd <- data.frame(dplyr::inner_join(g45.mean, g45.sd, by = "sid"))
g56.mean <- nrt.shak[c("sid", "g56.shak")] %>% na.omit() %>% group_by(sid) %>% summarise(avg.56 = mean(g56.shak))
g56.sd <- nrt.shak[c("sid", "g56.shak")] %>% na.omit() %>% group_by(sid) %>% summarise(sd.56 = sd(g56.shak))
g56.msd <- data.frame(dplyr::inner_join(g56.mean, g56.sd, by = "sid"))
g67.mean <- nrt.shak[c("sid", "g67.shak")] %>% na.omit() %>% group_by(sid) %>% summarise(avg.67 = mean(g67.shak))
g67.sd <- nrt.shak[c("sid", "g67.shak")] %>% na.omit() %>% group_by(sid) %>% summarise(sd.67 = sd(g67.shak))
g67.msd <- data.frame(dplyr::inner_join(g67.mean, g67.sd, by = "sid"))
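# Equivalent, more compact construction (a sketch, not part of the original
# pipeline): mean and SD per school in a single summarise() call, shown here
# for grades 3-4; the result should match g34.msd above.
g34.msd.alt <- nrt.shak %>%
  dplyr::select(sid, g34.shak) %>%
  na.omit() %>%
  group_by(sid) %>%
  summarise(avg.34 = mean(g34.shak), sd.34 = sd(g34.shak))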
# Join the grade-pair tables into one school-level table
g345.msd <- data.frame(dplyr::full_join(g34.msd, g45.msd, by = "sid"))
g3456.msd <- data.frame(dplyr::full_join(g345.msd, g56.msd, by = "sid"))
g37.msd <- data.frame(dplyr::full_join(g3456.msd, g67.msd, by = "sid"))
# School-level SD ratios (later-grade SD / earlier-grade SD)
g37.msd <- g37.msd %>% dplyr::mutate(sdr.45 = sd.45 / sd.34)
g37.msd <- g37.msd %>% dplyr::mutate(sdr.56 = sd.56 / sd.45)
g37.msd <- g37.msd %>% dplyr::mutate(sdr.67 = sd.67 / sd.56)
#head(g37.msd)
hist(g37.msd$sdr.45, breaks=seq(0,4,0.2), main="sdr.45", xlab="sd_ato / sd_mae", ylim=c(0,120), xlim=c(0,4))
hist(g37.msd$sdr.56, breaks=seq(0,4,0.2), main="sdr.56", xlab="sd_ato / sd_mae", ylim=c(0,120), xlim=c(0,4))
hist(g37.msd$sdr.67, breaks=seq(0,4,0.2), main="sdr.67", xlab="sd_ato / sd_mae", ylim=c(0,120), xlim=c(0,4))
plot(g34.msd$avg.34, g34.msd$sd.34, xlim = c(30, 70), ylim = c(0,20), main = "Correl of SS and SD (g34)")
plot(g45.msd$avg.45, g45.msd$sd.45, xlim = c(30, 70), ylim = c(0,20), main = "Correl of SS and SD (g45)")
plot(g56.msd$avg.56, g56.msd$sd.56, xlim = c(30, 70), ylim = c(0,20), main = "Correl of SS and SD (g56)")
plot(g67.msd$avg.67, g67.msd$sd.67, xlim = c(30, 70), ylim = c(0,20), main = "Correl of SS and SD (g67)")
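# Sketch (not part of the original script): the correlations suggested by the
# plot titles above, with use = "complete.obs" to guard against NA SDs from
# single-student schools.
cor(g34.msd$avg.34, g34.msd$sd.34, use = "complete.obs")
cor(g45.msd$avg.45, g45.msd$sd.45, use = "complete.obs")
cor(g56.msd$avg.56, g56.msd$sd.56, use = "complete.obs")
cor(g67.msd$avg.67, g67.msd$sd.67, use = "complete.obs")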
## Read the class-size data
setwd("/Users/koyo/Dropbox/000078_CSKAKEN/01_CSNC")
library(readxl)
csnc.all <- read_excel("sho_csnc.xlsx")
# Reshape the school data
setwd("/Users/koyo/Dropbox/000078_CSKAKEN/190700_Anal")
#### Keep only schools without consolidation or closure; exclude schools with combined (multi-grade) classes
csnc.taisho_ <- dplyr::filter(csnc.all, taisho.g1 == 1 &
                                togo == 0 & nonrt == 0 & fuku == 0)
# Convert the school data columns to numeric
csnc.taisho <- dplyr::select(csnc.taisho_, c("taisho", "sid.new",
                                             "nc.g1", "csmean.g1",
                                             "nc.g2", "csmean.g2",
                                             "nc.g3", "csmean.g3",
                                             "nc.g4", "csmean.g4",
                                             "nc.g5", "csmean.g5",
                                             "nc.g6", "csmean.g6"
                                             ))
csnc.taisho[] <- lapply(csnc.taisho, as.numeric)
csnc.nona <- na.omit(csnc.taisho)
colnames(csnc.nona) <- c("taisho", "sid",
"nc.g1", "cs.g1",
"nc.g2", "cs.g2",
"nc.g3", "cs.g3",
"nc.g4", "cs.g4",
"nc.g5", "cs.g5",
"nc.g6", "cs.g6"
)
csnc <- csnc.nona[,2:14]
## Center the class-size variables
### Mean class size in each grade
cs.m.g1 <- mean(csnc$cs.g1)
cs.m.g2 <- mean(csnc$cs.g2)
cs.m.g3 <- mean(csnc$cs.g3)
cs.m.g4 <- mean(csnc$cs.g4)
cs.m.g5 <- mean(csnc$cs.g5)
cs.m.g6 <- mean(csnc$cs.g6)
### Grand mean of the grade means
csm <- matrix(c(cs.m.g1, cs.m.g2, cs.m.g3, cs.m.g4, cs.m.g5, cs.m.g6), nrow=6, ncol=1)
cs.grand <- mean(csm)  # grand mean used for centering
csnc$cs.c.g1 <- csnc$cs.g1 - cs.grand
csnc$cs.c.g2 <- csnc$cs.g2 - cs.grand
csnc$cs.c.g3 <- csnc$cs.g3 - cs.grand
csnc$cs.c.g4 <- csnc$cs.g4 - cs.grand
csnc$cs.c.g5 <- csnc$cs.g5 - cs.grand
csnc$cs.c.g6 <- csnc$cs.g6 - cs.grand
## Create class-size change (difference from the previous grade) columns
csnc$cs.d12 <- csnc$cs.g2 - csnc$cs.g1
csnc$cs.d23 <- csnc$cs.g3 - csnc$cs.g2
csnc$cs.d34 <- csnc$cs.g4 - csnc$cs.g3
csnc$cs.d45 <- csnc$cs.g5 - csnc$cs.g4
csnc$cs.d56 <- csnc$cs.g6 - csnc$cs.g5
# Grades 4-5
cs.45 <- csnc[c("sid", "nc.g4", "cs.g4", "cs.c.g4", "cs.d34")]
sdr.45 <- g37.msd[c("sid", "avg.34", "avg.45", "sdr.45")]
cs.sdr.45 <- na.omit(data.frame(dplyr::inner_join(cs.45, sdr.45, by = "sid")))
# Grades 5-6
cs.56 <- csnc[c("sid", "nc.g5", "cs.g5", "cs.c.g5", "cs.d45")]
sdr.56 <- g37.msd[c("sid", "avg.45", "avg.56", "sdr.56")]
cs.sdr.56 <- na.omit(data.frame(dplyr::inner_join(cs.56, sdr.56, by = "sid")))
# Grades 6-7
cs.67 <- csnc[c("sid", "nc.g6", "cs.g6", "cs.c.g6", "cs.d56")]
sdr.67 <- g37.msd[c("sid", "avg.56", "avg.67", "sdr.67")]
cs.sdr.67 <- na.omit(data.frame(dplyr::inner_join(cs.67, sdr.67, by = "sid")))
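# Quick check (a sketch, not in the original): number of schools retained in
# each analysis data set; these counts should match the "Number of
# observations" reported by the brms models below.
sapply(list(g45 = cs.sdr.45, g56 = cs.sdr.56, g67 = cs.sdr.67), nrow)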
plot(cs.sdr.45$cs.g4, cs.sdr.45$sdr.45, xlim = c(0, 50), ylim = c(0,3), main = "cs.sdr.45")
plot(cs.sdr.56$cs.g5, cs.sdr.56$sdr.56, xlim = c(0, 50), ylim = c(0,3), main = "cs.sdr.56")
plot(cs.sdr.67$cs.g6, cs.sdr.67$sdr.67, xlim = c(0, 50), ylim = c(0,3), main = "cs.sdr.67")
plot(cs.sdr.45$cs.d34, cs.sdr.45$sdr.45, xlim = c(-15, 15), ylim = c(0,3), main = "cs_d.sdr.45")
plot(cs.sdr.56$cs.d45, cs.sdr.56$sdr.56, xlim = c(-15, 15), ylim = c(0,3), main = "cs_d.sdr.56")
plot(cs.sdr.67$cs.d56, cs.sdr.67$sdr.67, xlim = c(-15, 15), ylim = c(0,3), main = "cs_d.sdr.67")
# Center class size (recomputed within each analysis sample)
cs.sdr.45$cs.c.g4 <- cs.sdr.45$cs.g4 - mean(cs.sdr.45$cs.g4)
cs.sdr.56$cs.c.g5 <- cs.sdr.56$cs.g5 - mean(cs.sdr.56$cs.g5)
cs.sdr.67$cs.c.g6 <- cs.sdr.67$cs.g6 - mean(cs.sdr.67$cs.g6)
# Center prior achievement
cs.sdr.45$avg.c.34 <- cs.sdr.45$avg.34 - mean(cs.sdr.45$avg.34)
cs.sdr.56$avg.c.45 <- cs.sdr.56$avg.45 - mean(cs.sdr.56$avg.45)
cs.sdr.67$avg.c.56 <- cs.sdr.67$avg.56 - mean(cs.sdr.67$avg.56)
head(cs.sdr.45)
## sid nc.g4 cs.g4 cs.c.g4 cs.d34 avg.34 avg.45 sdr.45
## 38 18050 3 26.00000 3.69444444 0.00 50.22535 53.09859 0.9278309
## 39 18051 2 25.50000 3.19444444 0.50 52.68750 51.91667 1.0639151
## 40 18052 3 22.33333 0.02777778 0.00 49.35000 50.95000 0.7064820
## 41 18053 1 25.00000 2.69444444 1.00 56.65217 56.69565 0.8902944
## 56 18068 2 24.50000 2.19444444 -0.50 53.80435 50.13043 1.1886138
## 61 18073 4 26.25000 3.94444444 0.25 52.89474 52.51579 0.9858863
## avg.c.34
## 38 -2.72537856
## 39 -0.26323068
## 40 -3.60073068
## 41 3.70144324
## 56 0.85361715
## 61 -0.05599383
plot(cs.sdr.45$avg.c.34, cs.sdr.45$sdr.45, xlim = c(-15, 15), ylim = c(0,3), main = "Prior_3, sdr.45")
plot(cs.sdr.56$avg.c.45, cs.sdr.56$sdr.56, xlim = c(-15, 15), ylim = c(0,3), main = "Prior_4, sdr.56")
plot(cs.sdr.67$avg.c.56, cs.sdr.67$sdr.67, xlim = c(-15, 15), ylim = c(0,3), main = "Prior_5, sdr.67")
library(brms)
## Loading required package: Rcpp
## Loading required package: ggplot2
##
## Attaching package: 'ggplot2'
## The following objects are masked from 'package:psych':
##
## %+%, alpha
## Loading 'brms' package (version 2.7.0). Useful instructions
## can be found by typing help('brms'). A more detailed introduction
## to the package is available through vignette('brms_overview').
## Run theme_set(theme_default()) to use the default bayesplot theme.
##
## Attaching package: 'brms'
## The following object is masked from 'package:psych':
##
## cs
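# Optional sketch (not part of the original code): inspect the priors that
# would be used for the first model before fitting, via brms::get_prior().
get_prior(sdr.45 ~ cs.c.g4 + avg.c.34 + cs.c.g4:avg.c.34, data = cs.sdr.45)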
# End of grade 4
res.45 <- brm(sdr.45 ~ cs.c.g4 + avg.c.34 + cs.c.g4:avg.c.34,
              data = cs.sdr.45,
              prior = c(set_prior("normal(0,10)", class = "b")),
              chains = 4,
              iter = 10000,
              warmup = 3000
              )
## Compiling the C++ model
## Start sampling
## (Stan sampling progress for chains 1-4 omitted: 10000 iterations per chain, 3000 warmup, roughly 0.5 s per chain.)
print(res.45, digits = 3)
## Family: gaussian
## Links: mu = identity; sigma = identity
## Formula: sdr.45 ~ cs.c.g4 + avg.c.34 + cs.c.g4:avg.c.34
## Data: cs.sdr.45 (Number of observations: 48)
## Samples: 4 chains, each with iter = 10000; warmup = 3000; thin = 1;
## total post-warmup samples = 28000
##
## Population-Level Effects:
## Estimate Est.Error l-95% CI u-95% CI Eff.Sample Rhat
## Intercept 0.939 0.034 0.873 1.005 34167 1.000
## cs.c.g4 0.005 0.005 -0.005 0.015 29760 1.000
## avg.c.34 0.015 0.010 -0.004 0.035 31159 1.000
## cs.c.g4:avg.c.34 -0.001 0.002 -0.005 0.003 34629 1.000
##
## Family Specific Parameters:
## Estimate Est.Error l-95% CI u-95% CI Eff.Sample Rhat
## sigma 0.212 0.023 0.172 0.264 27994 1.000
##
## Samples were drawn using sampling(NUTS). For each parameter, Eff.Sample
## is a crude measure of effective sample size, and Rhat is the potential
## scale reduction factor on split chains (at convergence, Rhat = 1).
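# Optional sketch (not in the original): directional posterior probability for
# the class-size effect in res.45, using brms::hypothesis().
hypothesis(res.45, "cs.c.g4 > 0")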
# End of grade 5
res.56 <- brm(sdr.56 ~ cs.c.g5 + avg.c.45 + cs.c.g5:avg.c.45,
              data = cs.sdr.56,
              prior = c(set_prior("normal(0,10)", class = "b")),
              chains = 4,
              iter = 10000,
              warmup = 3000
              )
## Compiling the C++ model
## recompiling to avoid crashing R session
## Start sampling
## (Stan sampling progress for chains 1-4 omitted: 10000 iterations per chain, 3000 warmup, roughly 0.7-1.0 s per chain.)
print(res.56, digits = 3)
## Family: gaussian
## Links: mu = identity; sigma = identity
## Formula: sdr.56 ~ cs.c.g5 + avg.c.45 + cs.c.g5:avg.c.45
## Data: cs.sdr.56 (Number of observations: 117)
## Samples: 4 chains, each with iter = 10000; warmup = 3000; thin = 1;
## total post-warmup samples = 28000
##
## Population-Level Effects:
## Estimate Est.Error l-95% CI u-95% CI Eff.Sample Rhat
## Intercept 1.046 0.017 1.013 1.080 24817 1.000
## cs.c.g5 0.001 0.002 -0.003 0.006 32158 1.000
## avg.c.45 0.017 0.006 0.006 0.028 25861 1.000
## cs.c.g5:avg.c.45 -0.001 0.001 -0.002 0.001 35058 1.000
##
## Family Specific Parameters:
## Estimate Est.Error l-95% CI u-95% CI Eff.Sample Rhat
## sigma 0.180 0.012 0.158 0.206 24735 1.000
##
## Samples were drawn using sampling(NUTS). For each parameter, Eff.Sample
## is a crude measure of effective sample size, and Rhat is the potential
## scale reduction factor on split chains (at convergence, Rhat = 1).
# End of grade 6
res.67 <- brm(sdr.67 ~ cs.c.g6 + avg.c.56 + cs.c.g6:avg.c.56,
              data = cs.sdr.67,
              prior = c(set_prior("normal(0,10)", class = "b")),
              chains = 4,
              iter = 10000,
              warmup = 3000
              )
## Compiling the C++ model
## recompiling to avoid crashing R session
## Start sampling
## (Stan sampling progress for chains 1-4 omitted: 10000 iterations per chain, 3000 warmup, roughly 1.1-1.3 s per chain.)
print(res.67, digits = 3)
## Family: gaussian
## Links: mu = identity; sigma = identity
## Formula: sdr.67 ~ cs.c.g6 + avg.c.56 + cs.c.g6:avg.c.56
## Data: cs.sdr.67 (Number of observations: 167)
## Samples: 4 chains, each with iter = 10000; warmup = 3000; thin = 1;
## total post-warmup samples = 28000
##
## Population-Level Effects:
## Estimate Est.Error l-95% CI u-95% CI Eff.Sample Rhat
## Intercept 1.055 0.014 1.029 1.081 22829 1.000
## cs.c.g6 -0.002 0.002 -0.006 0.001 26800 1.000
## avg.c.56 0.015 0.004 0.006 0.023 21191 1.000
## cs.c.g6:avg.c.56 0.001 0.001 -0.000 0.002 32753 1.000
##
## Family Specific Parameters:
## Estimate Est.Error l-95% CI u-95% CI Eff.Sample Rhat
## sigma 0.168 0.009 0.151 0.188 19765 1.000
##
## Samples were drawn using sampling(NUTS). For each parameter, Eff.Sample
## is a crude measure of effective sample size, and Rhat is the potential
## scale reduction factor on split chains (at convergence, Rhat = 1).
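# Optional sketch (not in the original): collect the class-size coefficients
# from the three models into one table for comparison, using brms::fixef().
cs.effects <- rbind(
  fixef(res.45)["cs.c.g4", ],
  fixef(res.56)["cs.c.g5", ],
  fixef(res.67)["cs.c.g6", ]
)
rownames(cs.effects) <- c("sdr.45 ~ cs.c.g4", "sdr.56 ~ cs.c.g5", "sdr.67 ~ cs.c.g6")
cs.effects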