Bugs

########################################################################################
#############                          BUGS                                ############# 
########################################################################################
# packages used throughout: R2OpenBUGS for bugs(), R2jags for jags(), rstan, nimble,
# HDInterval for hdi(), and mvtnorm for rmvnorm()
library(R2OpenBUGS)
library(R2jags)
library(rstan)
library(nimble)
library(HDInterval)
library(mvtnorm)

set.seed(2)
y <- rnorm(100, 3, 2)            # true mean 3, true precision 1/2^2 = 0.25
y.cens <- ifelse(y > 0, y, NA)   # left-censor at 0: non-positive draws are recorded as NA
sink("regression.txt")
cat(" 
    model
    {
    # priors
    a ~ dnorm(0,0.01)
    b ~ dgamma(1,10)
    
    # likelihood
    for(i in 1:N)
    {
    # censored values (coded NA) are only known to lie below 0
    y[i] ~ dnorm(a, b)C(,0)
    }
    }
    ")
sink()
my.data <- list(y=y.cens, N=100)
params <- c("a", "b")
inits <- function(){list(a = 1, b = 1)}
res <- bugs(data=my.data, inits=inits, parameters.to.save=params,
            n.iter=10000, n.chains=1, n.burnin=1000,
            model.file="regression.txt",DIC=FALSE)
res$summary
       mean         sd      2.5%    25%    50%   75%     97.5%
a 3.0908169 0.21523668 2.6649750 2.9480 3.0910 3.236 3.5130000
b 0.2211367 0.03116677 0.1643975 0.1996 0.2198 0.241 0.2863025
hdi(res$sims.matrix)
          a      b
lower 2.675 0.1635
upper 3.521 0.2847
attr(,"credMass")
[1] 0.95
plot(res$sims.matrix[,1],type="l")   # trace of a
abline(h=3,col="red")                # true mean

plot(res$sims.matrix[,2],type="l")   # trace of b
abline(h=0.25,col="red")             # true precision

Jags

########################################################################################
#############                          JAGS                                ############# 
########################################################################################
set.seed(2)
y = rnorm(1000,3,2)
y.censored <- ifelse(y>=0, y, NA)   # censored observations stored as NA
y.ind <- as.numeric(y>=0)           # censoring indicator: 1 = observed, 0 = censored
N = length(y)
model_code=  'model {
for (j in 1:N){
# y.ind[j] = 1 says y.censored[j] lies above the cutpoint 0 (observed), 0 says below it (censored)
y.ind[j] ~ dinterval(y.censored[j], 0)
y.censored[j] ~ dnorm(a,b)
}
a ~ dnorm(0, .0001)
b ~ dgamma(0.1, .01)
sigma2 <- 1/b
}'
tobit.inits <- function() {
  # censored nodes must be initialised below the cutpoint; observed nodes need no init (NA)
  list(a=0, b=1, 
       y.censored=ifelse(y.ind, NA, -abs(rnorm(N, 0, 1))))
}
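As a rough check on how much censoring the indicator actually encodes (with this seed, 65 of the 1000 draws fall below 0, which matches the 935 observed y.censored nodes reported in the graph summary below):

table(y.ind)   # 0 = censored, 1 = observed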
m <- jags(model.file=textConnection(model_code),
          parameters.to.save = c("a", "b"),
          data=list(y.censored=y.censored, y.ind=y.ind, N=N),
          inits=tobit.inits,DIC=FALSE)
Compiling model graph
   Resolving undeclared variables
   Allocating nodes
Graph information:
   Observed stochastic nodes: 1935
   Unobserved stochastic nodes: 67
   Total graph size: 2009

Initializing model


m$BUGSoutput$summary
       mean         sd      2.5%       25%       50%      75%     97.5%     Rhat n.eff
a 3.1152969 0.06425348 2.9888148 3.0734665 3.1152774 3.157698 3.2387158 1.002365  1100
b 0.2386663 0.01139374 0.2176289 0.2309202 0.2382669 0.246107 0.2615254 1.000597  3000
plot(m$BUGSoutput$sims.matrix[,1],type="l")
abline(h=3,col="red")

plot(m$BUGSoutput$sims.matrix[,2],type="l")
abline(h=0.25,col="red")

Stan

########################################################################################
#############                          STAN                                ############# 
########################################################################################
y = rnorm(1000,3,2)
y.censored <- ifelse(y>=0, y, NA)
y.ind <- as.numeric(y>=0)
N = length(y)
model = "data {
int<lower=0> N_obs;
int<lower=0> N_cens;
real y_obs[N_obs];
real<upper=min(y_obs)> U;   // left-censoring point; it cannot exceed the smallest observed value
}
parameters {
real<upper=U> y_cens[N_cens];   // imputed censored values, constrained to lie below the censoring point
real mu;
real<lower=0> sigma2;
}
transformed parameters {
real<lower=0> tau;
tau = 1 / sigma2; 
}
model {
y_obs ~ normal(mu, sqrt(sigma2));
y_cens ~ normal(mu, sqrt(sigma2));   // censored values follow the same data model
mu ~ normal(0,10);
sigma2 ~ gamma(1,10);
}"
# only the observed values enter as data; the censored ones are parameters (y_cens) to be imputed
data <- list(y_obs = y[y.ind == 1], N_obs = sum(y.ind), N_cens = sum(1 - y.ind), U = 0)
fit <- stan(model_code = model,data = data, iter = 1000, warmup = 100, chains = 1)
recompiling to avoid crashing R session

SAMPLING FOR MODEL 'd2bd457994d20fed8470932d282d41fb' NOW (CHAIN 1).
Chain 1: 
Chain 1: Gradient evaluation took 0.001 seconds
Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 10 seconds.
Chain 1: Adjust your expectations accordingly!
Chain 1: 
Chain 1: 
Chain 1: WARNING: There aren't enough warmup iterations to fit the
Chain 1:          three stages of adaptation as currently configured.
Chain 1:          Reducing each adaptation stage to 15%/75%/10% of
Chain 1:          the given number of warmup iterations:
Chain 1:            init_buffer = 15
Chain 1:            adapt_window = 75
Chain 1:            term_buffer = 10
Chain 1: 
Chain 1: Iteration:   1 / 1000 [  0%]  (Warmup)
Chain 1: Iteration: 100 / 1000 [ 10%]  (Warmup)
Chain 1: Iteration: 101 / 1000 [ 10%]  (Sampling)
Chain 1: Iteration: 200 / 1000 [ 20%]  (Sampling)
Chain 1: Iteration: 300 / 1000 [ 30%]  (Sampling)
Chain 1: Iteration: 400 / 1000 [ 40%]  (Sampling)
Chain 1: Iteration: 500 / 1000 [ 50%]  (Sampling)
Chain 1: Iteration: 600 / 1000 [ 60%]  (Sampling)
Chain 1: Iteration: 700 / 1000 [ 70%]  (Sampling)
Chain 1: Iteration: 800 / 1000 [ 80%]  (Sampling)
Chain 1: Iteration: 900 / 1000 [ 90%]  (Sampling)
Chain 1: Iteration: 1000 / 1000 [100%]  (Sampling)
Chain 1: 
Chain 1:  Elapsed Time: 1.276 seconds (Warm-up)
Chain 1:                9.248 seconds (Sampling)
Chain 1:                10.524 seconds (Total)
Chain 1: 
fitd = extract(fit)
summary(fit,c("mu","tau"))
$summary
         mean     se_mean         sd      2.5%       25%       50%       75%     97.5%    n_eff      Rhat
mu  3.0484980 0.002172010 0.06059789 2.9380791 3.0056359 3.0450333 3.0883565 3.1745773 778.3791 0.9991415
tau 0.2475553 0.000325052 0.01004987 0.2281675 0.2408709 0.2475272 0.2536456 0.2684031 955.9056 0.9994369

$c_summary
, , chains = chain:1

         stats
parameter      mean         sd      2.5%       25%       50%       75%     97.5%
      mu  3.0484980 0.06059789 2.9380791 3.0056359 3.0450333 3.0883565 3.1745773
      tau 0.2475553 0.01004987 0.2281675 0.2408709 0.2475272 0.2536456 0.2684031
plot(fitd$mu,type="l")
abline(h=3,col="red")

plot(fitd$tau,type="l")
abline(h=0.25,col="red")

Nimble
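NIMBLE handles the censoring with the same dinterval construction as the JAGS model; the setup below is what produces the mcmc.out object summarised next (the monitor list and iteration count are guesses consistent with the output shown).

########################################################################################
#############                          NIMBLE                              ############# 
########################################################################################
set.seed(07042019)
y <- rnorm(1000, 3, 2)
y.censored <- ifelse(y >= 0, y, NA)
y.ind <- as.numeric(y >= 0)
N <- length(y)

simpleCode1 <- nimbleCode({
  for (j in 1:N){
    y.ind[j] ~ dinterval(y.censored[j], 0)
    y.censored[j] ~ dnorm(a, b)
  }
  a ~ dnorm(0, .0001)
  b ~ dgamma(0.1, .01)
  sigma2 <- 1/b
})

mcmc.out <- nimbleMCMC(code = simpleCode1,
                       constants = list(N = N),
                       data = list(y.censored = y.censored, y.ind = y.ind),
                       inits = list(a = 0, b = 1,
                                    y.censored = ifelse(y.ind, NA, -abs(rnorm(N, 0, 1)))),
                       monitors = c("a", "b"),
                       nchains = 1, niter = 1000,
                       summary = TRUE, progressBar = TRUE)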

mcmc.out$summary
      Mean    Median    St.Dev. 95%CI_low 95%CI_upp
a 3.064161 3.0637196 0.06354095 2.9384860 3.1910948
b 0.251025 0.2505204 0.01215968 0.2279741 0.2758376
plot(mcmc.out$samples[,1],type="l")
abline(h=3,col="red")

plot(mcmc.out$samples[,2],type="l")
abline(h=0.25,col="red")

Laplace

########################################################################################
#############                          LAPLACE                             ############# 
########################################################################################
d = ifelse(y < 0, 0, 1)   # observation indicator: 1 = observed, 0 = censored (below 0)
model <- function(p, y, d) {
  # censored points contribute log P(Y < 0); observed points the normal log-density
  log_lik <- sum(1 - d) * pnorm(0, p["mu"], 1/sqrt(p["tau"]), log = TRUE) +
    sum(d * dnorm(y, p["mu"], 1/sqrt(p["tau"]), log = TRUE))
  log_post <- log_lik + dnorm(p["mu"], 0, 10, log = TRUE) + dgamma(p["tau"], 1, 10, log = TRUE)
  log_post
}
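model() evaluates the log posterior of the same left-censored normal model: each observed point contributes a normal log-density and each censored point the log-probability of falling below 0,

$$
\log p(\mu, \tau \mid y) \;=\; \sum_{i:\,d_i=1} \log \mathcal{N}\!\left(y_i \,\middle|\, \mu, \tfrac{1}{\tau}\right) \;+\; \sum_{i:\,d_i=0} \log \Phi\!\big(\sqrt{\tau}\,(0-\mu)\big) \;+\; \log p(\mu) \;+\; \log p(\tau),
$$

with priors $\mu \sim \mathcal{N}(0, 10^2)$ and $\tau \sim \mathrm{Gamma}(1, 10)$, matching the BUGS and Stan runs.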
inits <- c(mu = 0, tau = 1)
fit <- optim(inits, model, control = list(fnscale = -1), hessian = TRUE, y = y, d = d)
NaNs produced (warning repeated 10 times)
param_mean <- fit$par
param_cov_mat <- solve(-fit$hessian)
round(param_mean, 2)
  mu  tau 
3.06 0.25 
# marginal normal approximations for each parameter (rnorm takes an sd, so take the square root of the variance)
mu <- rnorm(1000, param_mean[1], sqrt(param_cov_mat[1,1]))
tau <- rnorm(1000, param_mean[2], sqrt(param_cov_mat[2,2]))
samples <- rmvnorm(10000, param_mean, param_cov_mat)
hdi(samples)
            mu      tau
lower 2.941210 0.225592
upper 3.193083 0.271054
attr(,"credMass")
[1] 0.95
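The five fits use independently simulated datasets (and the BUGS run only 100 observations), so the numbers are not expected to match exactly, but a rough side-by-side of the posterior means for the location (true value 3) and the precision (true value 0.25) can be assembled from the objects above; a sketch, noting that the Stan draws have to come from fitd because fit was later overwritten by the optim result:

rbind(BUGS    = res$summary[, "mean"],
      JAGS    = m$BUGSoutput$summary[, "mean"],
      Stan    = c(mean(fitd$mu), mean(fitd$tau)),
      NIMBLE  = mcmc.out$summary[, "Mean"],
      Laplace = param_mean)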