# I will now run the Bayesian analysis of the Marseilles trial using JAGS and MCMC.
# This time I use a different Bayesian model, which I think is more intuitive: an
# explicit indicator variable (delta) switches between "the two arms differ" and
# "the two arms share a common success probability".
setwd("/Users/richard")
library('rjags')
## Loading required package: coda
## Linked to JAGS 4.3.0
## Loaded modules: basemod,bugs
nA <- 2   # observed count in arm A (out of kA = 16 trials; kA is fixed in the model)
nB <- 15  # observed count in arm B (out of kB = 26 trials; kB is fixed in the model)
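# The call below reads the model from 'raoult2.bug' in the working directory.
# A sketch that writes that file inline, so the script is self-contained
# (the model text matches the printout further down):
writeLines("
model {
  kA <- 16
  kB <- 26
  lambdaAB ~ dnorm(0, 1)
  lambdaA ~ dnorm(0, 1)
  lambdaB ~ dnorm(0, 1)
  delta ~ dbern(0.5)
  pA <- exp(lambdaA) / (1 + exp(lambdaA))
  pB <- exp(lambdaB) / (1 + exp(lambdaB))
  pAB <- exp(lambdaAB) / (1 + exp(lambdaAB))
  nA ~ dbin(delta * pA + (1 - delta) * pAB, kA)
  nB ~ dbin(delta * pB + (1 - delta) * pAB, kB)
}
", con = "raoult2.bug")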
jags <- jags.model('raoult2.bug',
                   data = list('nA' = nA, 'nB' = nB),
                   n.chains = 4, n.adapt = 100)
## Compiling model graph
## Resolving undeclared variables
## Allocating nodes
## Graph information:
## Observed stochastic nodes: 2
## Unobserved stochastic nodes: 4
## Total graph size: 26
##
## Initializing model
jags
## JAGS model:
##
## model {
## kA <- 16
## kB <- 26
## lambdaAB ~ dnorm(0, 1)
## lambdaA ~ dnorm(0, 1)
## lambdaB ~ dnorm(0, 1)
## delta ~ dbern(0.5)
## pA <- exp(lambdaA) / (1 + exp(lambdaA))
## pB <- exp(lambdaB) / (1 + exp(lambdaB))
## pAB <- exp(lambdaAB) / (1 + exp(lambdaAB))
## nA ~ dbin(delta * pA + (1 - delta) * pAB, kA)
## nB ~ dbin(delta * pB + (1 - delta) * pAB, kB)
## }
## Fully observed variables:
## nA nB
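# How to read the model: delta ~ dbern(0.5) is a model indicator. When
# delta = 1, the two arms get separate success probabilities pA and pB; when
# delta = 0, both arms share the common probability pAB. The posterior mean of
# delta is therefore the posterior probability that the arms differ, and the
# Bern(0.5) prior puts even odds on the two hypotheses.
# The exp(x) / (1 + exp(x)) expressions are the inverse logit (plogis() in R),
# and JAGS's dnorm(0, 1) is parameterized by precision, so each log-odds
# parameter gets a Normal(0, sd = 1) prior. A quick prior predictive check of
# the implied prior on each proportion (a sketch, run in plain R):
prior_p <- plogis(rnorm(1e5, mean = 0, sd = 1))
quantile(prior_p, probs = c(0.025, 0.5, 0.975))
# Roughly symmetric around 0.5, with most mass between about 0.12 and 0.88.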
update(jags, 1000)  # burn-in: discard 1000 iterations per chain
jags.samples(jags, c('pA', 'pB', 'delta'), 1000)
## $delta
## mcarray:
## [1] 0.94325
##
## Marginalizing over: iteration(1000),chain(4)
##
## $pA
## mcarray:
## [1] 0.2281439
##
## Marginalizing over: iteration(1000),chain(4)
##
## $pB
## mcarray:
## [1] 0.563168
##
## Marginalizing over: iteration(1000),chain(4)
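# The printed values are posterior means, averaged over the 1000 retained
# iterations of all 4 chains. Since delta is a 0/1 indicator, its mean
# (about 0.94 here) is the posterior probability that the two arms have
# different success probabilities.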
samples <- coda.samples(jags, c('pA', 'pB', 'delta'), 1000)
summary(samples)
##
## Iterations = 2101:3100
## Thinning interval = 1
## Number of chains = 4
## Sample size per chain = 1000
##
## 1. Empirical mean and standard deviation for each variable,
## plus standard error of the mean:
##
## Mean SD Naive SE Time-series SE
## delta 0.9640 0.1863 0.002946 0.007082
## pA 0.2196 0.1065 0.001683 0.003426
## pB 0.5661 0.0939 0.001485 0.001973
##
## 2. Quantiles for each variable:
##
## 2.5% 25% 50% 75% 97.5%
## delta 0.0000 1.0000 1.0000 1.0000 1.0000
## pA 0.0737 0.1503 0.2037 0.2669 0.4558
## pB 0.3808 0.5031 0.5698 0.6298 0.7376
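# The all-0/1 quantiles for delta are expected: it is a binary indicator, and
# the time-series SE column adjusts the naive Monte Carlo error for
# autocorrelation in the chains. With a Bern(0.5) prior on delta the prior
# odds are 1, so the posterior mean of delta converts directly into a Bayes
# factor for "the arms differ" (a sketch using the summary value above):
post_delta <- 0.9640
post_delta / (1 - post_delta)  # posterior odds = Bayes factor, about 27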
plot(samples)  # trace and density plots for delta, pA and pB
