# I will now run the Bayesian analysis of the Marseilles trial using JAGS and MCMC.
# This time I use a different Bayesian model which I think is more intuitive.
# Moreover, I add one simulated new patient from each group and print the joint distribution of their outcomes.
#
# This time I have created a "slab and spike" prior distribution on the unit square.
# It is a 50-50 mixture of a uniform density on the unit square and a uniform density
# on a strip of width sqrt(2) * 0.1 about the main diagonal.
setwd("/Users/richard/Desktop")
library('rjags')
## Loading required package: coda
## Linked to JAGS 4.3.0
## Loaded modules: basemod,bugs
nA <- 2   # observed successes in group A (out of kA = 16 patients, fixed in the model below)
nB <- 15  # observed successes in group B (out of kB = 26 patients, fixed in the model below)
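# The model file 'raoult5.bug' read by jags.model() below is assumed to contain exactly
# the model that the fitted jags object prints further down. For completeness, here is a
# sketch of how such a file could be created from within R (it reproduces that printed
# model verbatim; it was not part of the original script):
raoult5_model <- "
model {
  kA <- 16
  kB <- 26
  lambdaAB ~ dunif(0, 1)
  epsilon ~ dunif(-0.05, 0.05)
  lambdaA ~ dunif(0, 1)
  lambdaB ~ dunif(0, 1)
  delta ~ dbern(0.5)
  pA <- lambdaA
  pB <- lambdaB
  pAB <- min(max(lambdaAB + epsilon, 0), 1)
  chi <- delta * pA + (1 - delta) * pAB
  tau <- delta * pB + (1 - delta) * pAB
  nA ~ dbin(chi, kA)
  nB ~ dbin(tau, kB)
  alpha ~ dbern(chi)
  beta ~ dbern(tau)
  AplusBplus <- (alpha == 1) * (beta == 1)
  AplusBmin  <- (alpha == 1) * (beta == 0)
  AminBplus  <- (alpha == 0) * (beta == 1)
  AminBmin   <- (alpha == 0) * (beta == 0)
}"
writeLines(raoult5_model, 'raoult5.bug')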
jags <- jags.model('raoult5.bug',
                   data = list('nA' = nA, 'nB' = nB),
                   n.chains = 4, n.adapt = 100)
## Compiling model graph
## Resolving undeclared variables
## Allocating nodes
## Graph information:
## Observed stochastic nodes: 2
## Unobserved stochastic nodes: 7
## Total graph size: 33
##
## Initializing model
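# Note that no RNG seeds were fixed above, so the Monte Carlo results below will vary
# slightly from run to run. If exact reproducibility were wanted, per-chain RNG settings
# could be supplied to jags.model(), e.g. (a sketch, not used here):
#   inits <- lapply(1:4, function(i)
#     list(.RNG.name = "base::Mersenne-Twister", .RNG.seed = 100 + i))
#   jags <- jags.model('raoult5.bug', data = list('nA' = nA, 'nB' = nB),
#                      inits = inits, n.chains = 4, n.adapt = 100)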
jags
## JAGS model:
##
## model {
## kA <- 16
## kB <- 26
## lambdaAB ~ dunif(0, 1)
## epsilon ~ dunif(-0.05, 0.05)
## lambdaA ~ dunif(0, 1)
## lambdaB ~ dunif(0, 1)
## delta ~ dbern(0.5)
## pA <- lambdaA
## pB <- lambdaB
## pAB <- min(max(lambdaAB + epsilon, 0), 1)
## chi <- delta * pA + (1 - delta) * pAB
## tau <- delta * pB + (1 - delta) * pAB
## nA ~ dbin(chi, kA)
## nB ~ dbin(tau, kB)
## alpha ~ dbern(chi)
## beta ~ dbern(tau)
## AplusBplus <- (alpha == 1) * (beta == 1)
## AplusBmin <- (alpha == 1) * (beta == 0)
## AminBplus <- (alpha == 0) * (beta == 1)
## AminBmin <- (alpha == 0) * (beta == 0)
## }
## Fully observed variables:
## nA nB
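# To see what this prior puts on the unit square, the following stand-alone sketch
# (not part of the original run) simulates prior draws of the pair (chi, tau) exactly
# as defined in the model above: with probability 1/2 the two rates are independent
# uniforms (the slab, delta = 1), and with probability 1/2 both equal the common rate
# pAB (the spike, delta = 0).
set.seed(1)
n <- 20000
delta    <- rbinom(n, 1, 0.5)                # mixture indicator, as in the model
lambdaA  <- runif(n)
lambdaB  <- runif(n)
lambdaAB <- runif(n)
epsilon  <- runif(n, -0.05, 0.05)
pAB <- pmin(pmax(lambdaAB + epsilon, 0), 1)  # clipped to [0, 1]
chi <- delta * lambdaA + (1 - delta) * pAB   # group A success probability
tau <- delta * lambdaB + (1 - delta) * pAB   # group B success probability
plot(chi, tau, pch = ".", asp = 1, xlab = "chi (group A)", ylab = "tau (group B)",
     main = "Prior draws of (chi, tau)")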
update(jags, 1000)  # a further 1000 burn-in iterations before monitoring
jags.samples(jags, c('alpha', 'beta', 'delta'), 1000)  # monitor and print posterior means
## $alpha
## mcarray:
## [1] 0.16975
##
## Marginalizing over: iteration(1000),chain(4)
##
## $beta
## mcarray:
## [1] 0.552
##
## Marginalizing over: iteration(1000),chain(4)
##
## $delta
## mcarray:
## [1] 0.966
##
## Marginalizing over: iteration(1000),chain(4)
samples <- coda.samples(jags, c('alpha', 'beta', 'delta'), 1000)
summary(samples)
##
## Iterations = 2101:3100
## Thinning interval = 1
## Number of chains = 4
## Sample size per chain = 1000
##
## 1. Empirical mean and standard deviation for each variable,
## plus standard error of the mean:
##
##          Mean     SD Naive SE Time-series SE
## alpha 0.1865 0.3896 0.006159       0.006758
## beta  0.5585 0.4966 0.007852       0.007758
## delta 0.9523 0.2133 0.003372       0.011436
##
## 2. Quantiles for each variable:
##
##       2.5% 25% 50% 75% 97.5%
## alpha    0   0   0   0     1
## beta     0   0   1   1     1
## delta    0   1   1   1     1
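# Since the prior on delta is Bernoulli(0.5), the prior odds of "separate rates for the
# two groups" (delta = 1) against "a common rate" (delta = 0) equal 1, so the posterior
# odds coincide with the Bayes factor. With P(delta = 1 | data) around 0.95 the data
# favour separate rates by a factor of roughly 20, although the Monte Carlo error in
# this ratio is considerable because it hinges on the small estimated probability of
# delta = 0. A sketch of the computation from the coda samples above (not in the
# original script):
delta_draws <- as.matrix(samples)[, 'delta']   # pooled draws from the 4 chains
p_sep <- mean(delta_draws)                     # estimate of P(delta = 1 | data)
p_sep / (1 - p_sep)                            # posterior odds = Bayes factor here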
plot(samples)
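# The time-series SE for delta (0.011) is several times its naive SE (0.003), which
# indicates noticeable autocorrelation in the delta chain. The usual coda diagnostics
# can be used to check mixing and effective sample size (a sketch, not run in the
# original):
gelman.diag(samples)     # potential scale reduction factors, should be close to 1
effectiveSize(samples)   # effective sample sizes after accounting for autocorrelation
autocorr.diag(samples)   # lag-k autocorrelations for each monitored variable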

jags.samples(jags, c('AplusBplus', 'AplusBmin', 'AminBplus', 'AminBmin'), 1000)  # joint-outcome indicators for the two new patients
## $AminBmin
## mcarray:
## [1] 0.355
##
## Marginalizing over: iteration(1000),chain(4)
##
## $AminBplus
## mcarray:
## [1] 0.4715
##
## Marginalizing over: iteration(1000),chain(4)
##
## $AplusBmin
## mcarray:
## [1] 0.07925
##
## Marginalizing over: iteration(1000),chain(4)
##
## $AplusBplus
## mcarray:
## [1] 0.09425
##
## Marginalizing over: iteration(1000),chain(4)
samples <- coda.samples(jags, c('AplusBplus', 'AplusBmin', 'AminBplus', 'AminBmin'), 1000)
summary(samples)
##
## Iterations = 4101:5100
## Thinning interval = 1
## Number of chains = 4
## Sample size per chain = 1000
##
## 1. Empirical mean and standard deviation for each variable,
## plus standard error of the mean:
##
##              Mean     SD Naive SE Time-series SE
## AminBmin   0.3545 0.4784 0.007565       0.008073
## AminBplus  0.4735 0.4994 0.007896       0.008013
## AplusBmin  0.0705 0.2560 0.004048       0.004187
## AplusBplus 0.1015 0.3020 0.004775       0.004690
##
## 2. Quantiles for each variable:
##
##            2.5% 25% 50% 75% 97.5%
## AminBmin      0   0   0   1     1
## AminBplus     0   0   0   1     1
## AplusBmin     0   0   0   0     1
## AplusBplus    0   0   0   0     1
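# The four indicator means above give the estimated joint distribution of the outcomes
# of the two simulated new patients (alpha for the group-A patient, beta for the group-B
# patient); as a check, they sum to 1. The same table can also be obtained directly by
# cross-tabulating alpha and beta, as in this sketch (not in the original script; it
# draws a further 1000 iterations per chain):
ab <- as.matrix(coda.samples(jags, c('alpha', 'beta'), 1000))
joint <- table(alpha = ab[, 'alpha'], beta = ab[, 'beta']) / nrow(ab)
joint                              # rows: alpha = 0/1, columns: beta = 0/1
rowSums(joint)                     # marginal distribution of alpha
colSums(joint)                     # marginal distribution of beta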