Packages

library("R2WinBUGS")
library("R2jags")
library("rstan")
library("nimble")
library("boa")
library("coda")
library("xtable")
library("kableExtra")
library("tictoc")


Data

######################################################################################
#                                  DATA SIMULATION                                   #
######################################################################################
set.seed(123)
#True parameter values
beta0=-2
beta1=2
beta2=10
sigma2=5
n=100
tau=1/sigma2
#Data generation
x1=rnorm(n,0,1)
x2=rnorm(n,2,10)
e=rnorm(n,0, sd = sqrt(sigma2))
y=beta0+beta1*x1+beta2*x2+e
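
As a quick sanity check on the simulated data, an ordinary least-squares fit should recover estimates close to the true values set above; a minimal sketch:

#Sanity check: OLS estimates should be close to the true parameters
fit_ols = lm(y ~ x1 + x2)
coef(fit_ols)              #compare with beta0 = -2, beta1 = 2, beta2 = 10
summary(fit_ols)$sigma^2   #compare with sigma2 = 5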


BUGS

Code

######################################################################################
#                                          BUGS                                      #
######################################################################################
#BUGS-style model
sink("mod1.txt")        
cat("
    model {
    #Likelihood
    for(i in 1:N) {
    y[i] ~ dnorm(mu[i], tau)
    mu[i] <- beta0 + beta1*x1[i] + beta2*x2[i]
    }
    #Priors
    beta0 ~ dnorm(0,0.1)
    beta1 ~ dnorm(0,0.1)
    beta2 ~ dnorm(0,0.1)
    tau ~ dexp(0.1)
    
    #Transform to the variance scale
    sigma2 <- 1/tau
    } ", fill = TRUE)
sink()
#Note: the normal distribution in BUGS is parameterized in terms of the precision.
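To illustrate the point, R's rnorm()/dnorm() are parameterized by the standard deviation, so a BUGS precision tau corresponds to sd = 1/sqrt(tau); a minimal sketch with a hypothetical value:

#Converting a BUGS precision to the sd/variance scale used by R (hypothetical tau)
tau_example    = 0.2
sd_example     = 1/sqrt(tau_example)
sigma2_example = 1/tau_example
c(sd = sd_example, sigma2 = sigma2_example)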
#Sample size
N = length(y)
#Observed data
data = list("N","y","x1","x2")
#Parameters to monitor
params = c("beta0", "beta1","beta2","tau","sigma2")  
#Initial values
inits <- function () {list(beta0 = rnorm(1),
                           beta1 = rnorm(1),
                           beta2 = rnorm(1),
                           tau = 1)}
#Fit the model
tic()
result <- bugs(data = data, inits = inits, parameters.to.save = params,
               model.file = "mod1.txt", n.chains = 3, n.iter = 10000,
               n.burnin = 1000, n.thin=9, bugs.directory = "C:\\Program Files\\WinBUGS14",
               debug = TRUE, save.history = TRUE, DIC = FALSE)
cannot create file 'C:\Program Files\WinBUGS14/System/Rsrc/Registry_Rsave.odc', reason 'Permission denied'
cannot open file 'C:\Program Files\WinBUGS14/System/Rsrc/Registry.odc': Permission denied
Error in file(con, "wb") : cannot open the connection
toc()
6.12 sec elapsed
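
The permission error above is common when WinBUGS sits under C:\Program Files without write access; one workaround is to run R with administrator rights or to point bugs.directory at a user-writable copy of WinBUGS14. A sketch with a hypothetical path:

#Possible workaround (hypothetical, user-writable path; adjust as needed)
result <- bugs(data = data, inits = inits, parameters.to.save = params,
               model.file = "mod1.txt", n.chains = 3, n.iter = 10000,
               n.burnin = 1000, n.thin = 9,
               bugs.directory = "C:/Users/me/WinBUGS14",
               debug = FALSE, DIC = FALSE)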


Results

result$summary %>%
  kable() %>%
  kable_styling()

mean sd 2.5% 25% 50% 75% 97.5% Rhat n.eff
beta0 -1.7017525 0.2174842 -2.1550000 -1.84525 -1.70400 -1.554000 -1.279975 1.002923 830
beta1 1.6939550 0.2304004 1.2440000 1.53975 1.69900 1.845000 2.144000 1.001317 2400
beta2 10.0042037 0.0220753 9.9620000 9.98900 10.00000 10.020000 10.050000 1.000688 3000
tau 0.2250701 0.0327324 0.1658975 0.20160 0.22385 0.246025 0.293710 1.000817 3000
sigma2 4.5387863 0.6724222 3.4048749 4.06400 4.46750 4.959250 6.026150 1.000817 3000

bugs = result$sims.list
bugs_b0 = bugs$beta0
bugs_b1 = bugs$beta1
bugs_b2 = bugs$beta2
bugs_s2 = bugs$sigma2
bugs_tau = bugs$tau
plot(bugs_b0,type = "l",main=expression(beta[0]),ylab="",
     xlab="",cex.main=2)
abline(h=-2,col="red")

plot(bugs_b1,type = "l",main=expression(beta[1]),ylab="",
     xlab="",cex.main=2)
abline(h=2,col="red")

plot(bugs_b2,type = "l",main=expression(beta[2]),ylab="",
     xlab="",cex.main=2)
abline(h=10,col="red")

plot(bugs_s2,type = "l",main=expression(sigma^2),ylab="",
     xlab="",cex.main=2)
abline(h=5,col="red")

plot(bugs_tau,type = "l",main=expression(tau),ylab="",
     xlab="",cex.main=2)
abline(h=1/5,col="red")

densplot(as.mcmc(bugs_b0),main=expression(beta[0]),xlab="",ylab="",cex.main=2)

densplot(as.mcmc(bugs_b1),main=expression(beta[1]),xlab="",ylab="",cex.main=2)

densplot(as.mcmc(bugs_b2),main=expression(beta[2]),xlab="",ylab="",cex.main=2)

densplot(as.mcmc(bugs_s2),main=expression(sigma^2),xlab="",ylab="",cex.main=2)

densplot(as.mcmc(bugs_tau),main=expression(tau),xlab="",ylab="",cex.main=2)
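
Since coda is already loaded, a few numerical diagnostics can complement the trace and density plots. A minimal sketch on the pooled draws (the Gelman-Rubin statistic, which needs separate chains, is omitted here):

#Numerical diagnostics with coda (sketch; pooled draws from sims.list)
effectiveSize(as.mcmc(bugs_b0))              #effective sample size
geweke.diag(as.mcmc(bugs_b0))                #Geweke convergence diagnostic
HPDinterval(as.mcmc(bugs_b0), prob = 0.95)   #95% HPD interval
HPDinterval(as.mcmc(bugs_s2), prob = 0.95)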


JAGS

Code

######################################################################################
#                                          JAGS                                      #
######################################################################################
model_code = 'model{
    #Likelihood
    for(i in 1:N) {
    y[i] ~ dnorm(mu[i], tau)
    mu[i] <- beta0 + beta1*x1[i] + beta2*x2[i]
    }
    #Priors
    beta0 ~ dnorm(0,0.1)
    beta1 ~ dnorm(0,0.1)
    beta2 ~ dnorm(0,0.1)
    tau ~ dexp(0.1)
    
    #Transform to the variance scale
    sigma2 <- 1/tau}'
  
N = length(y)
model_data = list(N = N, y = y, x1 = x1, x2 = x2)
model_parameters =  c("beta0", "beta1","beta2", "sigma2","tau")
tic()
model_run = jags(data = model_data,
                 parameters.to.save = model_parameters,
                 model.file=textConnection(model_code),
                 n.chains=3, # Number of different starting positions
                 n.iter=10000, # Number of iterations
                 n.burnin=1000, # Number of iterations to remove at start
                 n.thin=9,# Amount of thinning
                 DIC=FALSE) 
module glm loaded
module dic loaded
Compiling model graph
   Resolving undeclared variables
   Allocating nodes
Graph information:
   Observed stochastic nodes: 100
   Unobserved stochastic nodes: 4
   Total graph size: 609

Initializing model


toc()
1.53 sec elapsed
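
The R2jags object can also be handed to coda for convergence checks; a minimal sketch, assuming as.mcmc() returns an mcmc.list with one element per chain:

#Convergence checks via coda (sketch)
jags_mcmc = as.mcmc(model_run)   #mcmc.list, one element per chain
gelman.diag(jags_mcmc)           #potential scale reduction factors
effectiveSize(jags_mcmc)         #effective sample sizes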


Results

model_run$BUGSoutput$summary %>%
  kable() %>%
  kable_styling()

mean sd 2.5% 25% 50% 75% 97.5% Rhat n.eff
beta0 -1.7076087 0.2190477 -2.1461585 -1.8507053 -1.7046471 -1.5691274 -1.2767817 1.002397 1100
beta1 1.6961675 0.2307555 1.2357823 1.5441611 1.6996822 1.8477681 2.1492119 1.000865 3000
beta2 10.0048006 0.0221120 9.9629177 9.9894164 10.0044162 10.0195144 10.0493598 1.001024 3000
sigma2 4.5286361 0.6615459 3.3978227 4.0576457 4.4566326 4.9135678 5.9771186 1.000677 3000
tau 0.2254174 0.0322019 0.1673047 0.2035181 0.2243847 0.2464483 0.2943061 1.000677 3000

jags = model_run[["BUGSoutput"]][["sims.matrix"]]
jags_b0 = jags[,1]
jags_b1 = jags[,2]
jags_b2 = jags[,3]
jags_s2 = jags[,4]
jags_tau = jags[,5]
plot(jags_b0,type = "l",main=expression(beta[0]),ylab="",
     xlab="",cex.main=2)
abline(h=beta0,col="red")

plot(jags_b1,type = "l",main=expression(beta[1]),ylab="",
     xlab="",cex.main=2)
abline(h=beta1,col="red")

plot(jags_b2,type = "l",main=expression(beta[2]),ylab="",
     xlab="",cex.main=2)
abline(h=beta2,col="red")

plot(jags_s2,type = "l",main=expression(sigma^2),ylab="",
     xlab="",cex.main=2)
abline(h=sigma2,col="red")

plot(jags_tau,type = "l",main=expression(tau),ylab="",
     xlab="",cex.main=2)
abline(h=tau,col="red")

densplot(as.mcmc(jags_b0),main=expression(beta[0]),xlab="",ylab="",cex.main=2)

densplot(as.mcmc(jags_b1),main=expression(beta[1]),xlab="",ylab="",cex.main=2)

densplot(as.mcmc(jags_b2),main=expression(beta[2]),xlab="",ylab="",cex.main=2)

densplot(as.mcmc(jags_s2),main=expression(sigma^2),xlab="",ylab="",cex.main=2)

densplot(as.mcmc(jags_tau),main=expression(tau),xlab="",ylab="",cex.main=2)


STAN

Code

######################################################################################
#                                          STAN                                      #
######################################################################################
model = "data {
int<lower=1> N;
vector[N] y;
vector[N] x1;
vector[N] x2;
}
parameters {
real alpha;
real beta1;
real beta2;
real<lower=0> sigma2;
}
model {
alpha ~ normal(0, 10);   
beta1 ~ normal(0, 10); 
beta2 ~ normal(0, 10);
sigma2 ~ exponential(0.1);
y ~ normal(alpha + beta1 * x1 + beta2*x2, sqrt(sigma2));
}
generated quantities {
vector[N] y_rep;
for(n in 1:N) {
y_rep[n] = normal_rng(alpha + beta1 * x1[n] + beta2*x2[n], sqrt(sigma2));
}
}"
data <- list(y = y, x1 = x1, x2 = x2 , N = 100)
tic()
fit <- stan(model_code = model,
            data = data, iter = 2000, warmup = 1000, chains = 3)

SAMPLING FOR MODEL 'e1b1399f422eab1042231e247cc960d0' NOW (CHAIN 1).
Chain 1: 
Chain 1: Gradient evaluation took 0 seconds
Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 0 seconds.
Chain 1: Adjust your expectations accordingly!
Chain 1: 
Chain 1: 
Chain 1: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 1: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 1: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 1: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 1: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 1: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 1: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 1: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 1: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 1: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 1: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 1: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 1: 
Chain 1:  Elapsed Time: 0.266 seconds (Warm-up)
Chain 1:                0.253 seconds (Sampling)
Chain 1:                0.519 seconds (Total)
Chain 1: 

SAMPLING FOR MODEL 'e1b1399f422eab1042231e247cc960d0' NOW (CHAIN 2).
Chain 2: 
Chain 2: Gradient evaluation took 0 seconds
Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 0 seconds.
Chain 2: Adjust your expectations accordingly!
Chain 2: 
Chain 2: 
Chain 2: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 2: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 2: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 2: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 2: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 2: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 2: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 2: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 2: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 2: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 2: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 2: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 2: 
Chain 2:  Elapsed Time: 0.247 seconds (Warm-up)
Chain 2:                0.283 seconds (Sampling)
Chain 2:                0.53 seconds (Total)
Chain 2: 

SAMPLING FOR MODEL 'e1b1399f422eab1042231e247cc960d0' NOW (CHAIN 3).
Chain 3: 
Chain 3: Gradient evaluation took 0 seconds
Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 0 seconds.
Chain 3: Adjust your expectations accordingly!
Chain 3: 
Chain 3: 
Chain 3: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 3: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 3: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 3: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 3: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 3: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 3: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 3: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 3: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 3: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 3: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 3: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 3: 
Chain 3:  Elapsed Time: 0.254 seconds (Warm-up)
Chain 3:                0.296 seconds (Sampling)
Chain 3:                0.55 seconds (Total)
Chain 3: 
stan = extract(fit)
toc()
2.92 sec elapsed


Results

summary(fit,probs = c(0.025, 0.25, 0.50, 0.75, 0.975),pars=c("alpha","beta1","beta2","sigma2")) %>%
  kable() %>%
  kable_styling()

mean se_mean sd 2.5% 25% 50% 75% 97.5% n_eff Rhat
alpha -1.711879 0.0039585 0.2178438 -2.149853 -1.856630 -1.715558 -1.562930 -1.289226 3028.469 0.9998163
beta1 1.696990 0.0041150 0.2394934 1.230432 1.533910 1.700755 1.859659 2.173669 3387.191 0.9992961
beta2 10.005300 0.0003758 0.0219040 9.962440 9.990545 10.004899 10.020008 10.048265 3396.696 0.9996994
sigma2 4.679846 0.0135358 0.7072477 3.532962 4.179450 4.615137 5.084224 6.219188 2730.089 0.9997792
Per-chain summaries (mean sd 2.5% 25% 50% 75% 97.5%):

Chain 1
alpha -1.715337 0.2112608 -2.139096 -1.853517 -1.714885 -1.572406 -1.314811
beta1 1.701260 0.2390240 1.241720 1.541517 1.705869 1.865035 2.176518
beta2 10.005832 0.0227089 9.959931 9.990109 10.006705 10.021240 10.048320
sigma2 4.666903 0.7199613 3.495833 4.160133 4.606047 5.087435 6.183732

Chain 2
alpha -1.705013 0.2255289 -2.120103 -1.868463 -1.705580 -1.542935 -1.267033
beta1 1.692523 0.2392074 1.208954 1.532642 1.694606 1.853114 2.172485
beta2 10.004367 0.0213496 9.962344 9.990137 10.003969 10.019176 10.046483
sigma2 4.701225 0.7094819 3.576787 4.197537 4.641227 5.082535 6.308242

Chain 3
alpha -1.715288 0.2165586 -2.167543 -1.852588 -1.721561 -1.573138 -1.304341
beta1 1.697187 0.2404062 1.237553 1.529907 1.694986 1.859664 2.173477
beta2 10.005701 0.0216216 9.963733 9.991673 10.004829 10.019762 10.048821
sigma2 4.671410 0.6922385 3.526416 4.167105 4.595169 5.084195 6.129910

stan_b0 = stan$alpha
stan_b1 = stan$beta1
stan_b2 = stan$beta2
stan_s2 = stan$sigma2
length(stan_b0)
[1] 6000
par(mfrow=c(1,4))
plot(stan_b0,type = "l",main=expression(beta[0]),ylab="",
     xlab="",cex.main=2)
abline(h=-2,col="red")
plot(stan_b1,type = "l",main=expression(beta[1]),ylab="",
     xlab="",cex.main=2)
abline(h=2,col="red")
plot(stan_b2,type = "l",main=expression(beta[2]),ylab="",
     xlab="",cex.main=2)
abline(h=10,col="red")
plot(stan_s2,type = "l",main=expression(sigma^2),ylab="",
     xlab="",cex.main=2)
abline(h=5,col="red")
par(mfrow=c(1,4))

plot(density(stan_b0),main=expression(beta[0]),cex.main=2)
plot(density(stan_b1),main=expression(beta[1]),cex.main=2)
plot(density(stan_b2),main=expression(beta[2]),cex.main=2)
plot(density(stan_s2),main=expression(sigma^2),cex.main=2)
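
The generated quantities block already simulates replicated data y_rep, so a simple posterior predictive check can reuse the extracted draws; a minimal sketch:

#Posterior predictive check (sketch): observed y against replicated data sets
y_rep = stan$y_rep                                   #matrix: draws x N
plot(density(y), lwd = 2, main = "Posterior predictive check", xlab = "y")
for(s in sample(nrow(y_rep), 20)) lines(density(y_rep[s, ]), col = "grey")
lines(density(y), lwd = 2)                           #redraw the observed density on top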


NIMBLE

Code

######################################################################################
#                                        NIMBLE                                     #
######################################################################################
simpleCode1 <- nimbleCode({
  beta0 ~ dnorm(0,0.1)
  beta1 ~ dnorm(0,0.1)
  beta2 ~ dnorm(0,0.1)
  tau ~ dexp(0.1)
  sigma2 <- 1/tau
  for(i in 1:N) {
    Ypred[i] <- beta0 + beta1 * x1[i] + beta2*x2[i]
    Y[i] ~ dnorm(Ypred[i], tau)
  }
})
simpleModel1 <- nimbleModel(simpleCode1,
                            data = list(Y = y, x1 = x1,x2=x2),
                            constants = list(N = N),
                            inits = list(beta0 = 0, beta1 = 0,beta2=0, tau = 2 ))
defining model...
building model...
setting data and initial values...
running calculate on model (any error reports that follow may simply reflect missing values in model variables) ... 
checking model sizes and dimensions...
model building finished.
tic()
mcmc.out <- nimbleMCMC(code = simpleModel1, 
                       data = list(Y = y, x1 = x1,x2=x2), 
                       inits = list(beta0 = 0, beta1 = 0,beta2=0, tau = 2),
                       monitors=c("beta0","beta1","beta2","tau","sigma2"),
                       nchains = 3, 
                       niter = 10000,
                       thin = 9,
                       nburnin = 1000,
                       summary = TRUE, 
                       progressBar = TRUE)
compiling... this may take a minute. Use 'showCompilerOutput = TRUE' to see C++ compiler details.
compilation finished.
runMCMC's handling of nburnin changed in nimble version 0.6-11. Previously, nburnin samples were discarded *post-thinning*.  Now nburnin samples are discarded *pre-thinning*.  The number of samples returned will be floor((niter-nburnin)/thin).
running chain 1...
running chain 2...
running chain 3...
toc()
47.8 sec elapsed
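
For finer control over samplers, NIMBLE's lower-level workflow (configureMCMC / buildMCMC / compileNimble / runMCMC) can replace the single nimbleMCMC() call; a minimal sketch reusing simpleModel1:

#Alternative lower-level NIMBLE workflow (sketch)
conf    = configureMCMC(simpleModel1, monitors = c("beta0","beta1","beta2","tau","sigma2"))
mcmc    = buildMCMC(conf)
Cmodel  = compileNimble(simpleModel1)
Cmcmc   = compileNimble(mcmc, project = simpleModel1)
samples = runMCMC(Cmcmc, niter = 10000, nburnin = 1000, thin = 9, nchains = 3)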


Results

mcmc.out$summary %>%
  kable() %>%
  kable_styling()

Chain 1
Mean Median St.Dev. 95%CI_low 95%CI_upp
beta0 -1.6907839 -1.6932187 0.2168434 -2.1003077 -1.2676358
beta1 1.6991037 1.7033088 0.2401949 1.2422207 2.1571916
beta2 10.0051223 10.0048771 0.0221609 9.9636470 10.0482092
sigma2 4.5369650 4.4801642 0.6590663 3.4497420 6.0173667
tau 0.2249185 0.2232061 0.0317228 0.1661859 0.2898768
Chain 2
Mean Median St.Dev. 95%CI_low 95%CI_upp
beta0 -1.7000078 -1.6961722 0.2158000 -2.1396323 -1.2913578
beta1 1.7014881 1.7066942 0.2369020 1.2474275 2.1565706
beta2 10.0047332 10.0053273 0.0221737 9.9618107 10.0465519
sigma2 4.5332345 4.4783386 0.6433217 3.4425397 5.9714646
tau 0.2249201 0.2232971 0.0312156 0.1674631 0.2904838
Chain 3
Mean Median St.Dev. 95%CI_low 95%CI_upp
beta0 -1.7106974 -1.7121757 0.2184733 -2.1165309 -1.2775843
beta1 1.6950962 1.6920260 0.2356570 1.2323745 2.1560240
beta2 10.0044966 10.0037646 0.0225031 9.9623308 10.0493197
sigma2 4.5105963 4.4462775 0.6546013 3.4364903 5.9487189
tau 0.2262509 0.2249073 0.0320573 0.1681034 0.2909947
All chains (combined)
Mean Median St.Dev. 95%CI_low 95%CI_upp
beta0 -1.7004964 -1.7010251 0.2171219 -2.1187374 -1.2763349
beta1 1.6985627 1.7005404 0.2375277 1.2407543 2.1571318
beta2 10.0047840 10.0047980 0.0222739 9.9628890 10.0482932
sigma2 4.5269319 4.4668997 0.6522500 3.4394622 5.9747549
tau 0.2253632 0.2238689 0.0316628 0.1673709 0.2907431

nimble_b0  =  c(mcmc.out$samples$chain1[,1],mcmc.out$samples$chain2[,1],mcmc.out$samples$chain3[,1])
nimble_b1  =  c(mcmc.out$samples$chain1[,2],mcmc.out$samples$chain2[,2],mcmc.out$samples$chain3[,2])
nimble_b2  =  c(mcmc.out$samples$chain1[,3],mcmc.out$samples$chain2[,3],mcmc.out$samples$chain3[,3])
nimble_s2  =  c(mcmc.out$samples$chain1[,4],mcmc.out$samples$chain2[,4],mcmc.out$samples$chain3[,4])
nimble_tau =  c(mcmc.out$samples$chain1[,5],mcmc.out$samples$chain2[,5],mcmc.out$samples$chain3[,5])
plot(nimble_b0,type = "l",main=expression(beta[0]),ylab="",
     xlab="",cex.main=2)
abline(h=-2,col="red")

plot(nimble_b1,type = "l",main=expression(beta[1]),ylab="",
     xlab="",cex.main=2)
abline(h=2,col="red")

plot(nimble_b2,type = "l",main=expression(beta[2]),ylab="",
     xlab="",cex.main=2)
abline(h=10,col="red")

plot(nimble_s2,type = "l",main=expression(sigma^2),ylab="",
     xlab="",cex.main=2)
abline(h=5,col="red")

plot(nimble_tau,type = "l",main=expression(tau),ylab="",
     xlab="",cex.main=2)
abline(h=1/5,col="red")

plot(density(nimble_b0),main=expression(beta[0]),cex.main=2)

plot(density(nimble_b1),main=expression(beta[1]),cex.main=2)

plot(density(nimble_b2),main=expression(beta[2]),cex.main=2)

plot(density(nimble_s2),main=expression(sigma^2),cex.main=2)

plot(density(nimble_tau),main=expression(tau),cex.main=2)
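
To close the comparison, the posterior means from the four engines can be collected into a single table; a sketch that reuses the vectors created above (and assumes all four runs completed):

#Posterior means by engine (sketch)
comparison = data.frame(
  true   = c(beta0, beta1, beta2, sigma2),
  BUGS   = c(mean(bugs_b0),   mean(bugs_b1),   mean(bugs_b2),   mean(bugs_s2)),
  JAGS   = c(mean(jags_b0),   mean(jags_b1),   mean(jags_b2),   mean(jags_s2)),
  Stan   = c(mean(stan_b0),   mean(stan_b1),   mean(stan_b2),   mean(stan_s2)),
  NIMBLE = c(mean(nimble_b0), mean(nimble_b1), mean(nimble_b2), mean(nimble_s2)),
  row.names = c("beta0", "beta1", "beta2", "sigma2"))
comparison %>% kable() %>% kable_styling()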
