library(LaplacesDemon)
LaplacesDemon: Complete Environment for Bayesian Inference
Version: 12.12.03
"Probability theory is nothing but common sense reduced to calculation" (Pierre-Simon Laplace, 1814).
Laplace's Demon is ready for you.
############################## Demon Data ###############################
data(demonsnacks)
y <- log(demonsnacks$Calories)
X <- cbind(1, as.matrix(demonsnacks[,c(7,8,10)]))
J <- ncol(X)
for (j in 2:J) {X[,j] <- CenterScale(X[,j])}
mon.names <- c("LP","sigma")
parm.names <- as.parm.names(list(beta=rep(0,J), log.sigma=0))
PGF <- function(Data) return(c(rnormv(Data$J,0,10),
log(rhalfcauchy(1,25))))
MyData <- list(J=J, PGF=PGF, X=X, mon.names=mon.names,
parm.names=parm.names, y=y)
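### Quick check (not part of the original session): the design matrix has
### J=4 columns; columns 2:J were centered and scaled by CenterScale, so
### their means should be ~0 and standard deviations ~1.
#round(colMeans(X), 3)
#round(apply(X, 2, sd), 3)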
########################## Model Specification ##########################
Model <- function(parm, Data)
{
### Parameters
beta <- parm[1:Data$J]
sigma <- exp(parm[Data$J+1])
### Log of Prior Densities
beta.prior <- sum(dnormv(beta, 0, 1000, log=TRUE))
sigma.prior <- dhalfcauchy(sigma, 25, log=TRUE)
### Log-Likelihood
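### mu is the linear predictor: tcrossprod(Data$X, t(beta)) is
### equivalent to Data$X %*% beta.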
mu <- tcrossprod(Data$X, t(beta))
LL <- sum(dnorm(Data$y, mu, sigma, log=TRUE))
### Log-Posterior
LP <- LL + beta.prior + sigma.prior
Modelout <- list(LP=LP, Dev=-2*LL, Monitor=c(LP,sigma),
yhat=rnorm(length(mu), mu, sigma), parm=parm)
return(Modelout)
}
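### Note: the list returned by Model must contain exactly these five
### components: LP (scalar log-posterior), Dev (deviance, -2*LL),
### Monitor (values of the monitored variables), yhat (replicates for
### posterior predictive checks), and parm (the parameter vector,
### possibly constrained).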
set.seed(666)
############################ Initial Values #############################
Initial.Values <- GIV(Model, MyData, PGF=TRUE)
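### A sanity check worth running before MCMC (assumed, not part of the
### original session): the log-posterior should be finite at the initial
### values generated by GIV.
#is.finite(Model(Initial.Values, MyData)$LP)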
###########################################################################
# Examples of MCMC Algorithms #
###########################################################################
######################## Hit-And-Run Metropolis #########################
Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
Covar=NULL, Iterations=1000, Status=100, Thinning=1,
Algorithm="HARM", Specs=NULL)
Laplace's Demon was called on Sat Dec 29 20:34:16 2012
Performing initial checks...
Algorithm: Hit-And-Run Metropolis
Laplace's Demon is beginning to update...
Iteration: 100, Proposal: Multivariate
Iteration: 200, Proposal: Multivariate
Iteration: 300, Proposal: Multivariate
Iteration: 400, Proposal: Multivariate
Iteration: 500, Proposal: Multivariate
Iteration: 600, Proposal: Multivariate
Iteration: 700, Proposal: Multivariate
Iteration: 800, Proposal: Multivariate
Iteration: 900, Proposal: Multivariate
Iteration: 1000, Proposal: Multivariate
Assessing Stationarity
Assessing Thinning and ESS
Creating Summaries
Estimating Log of the Marginal Likelihood
Creating Output
Laplace's Demon has finished.
Fit
Call:
LaplacesDemon(Model = Model, Data = MyData, Initial.Values = Initial.Values,
Covar = NULL, Iterations = 1000, Status = 100, Thinning = 1,
Algorithm = "HARM", Specs = NULL)
Acceptance Rate: 0.268
Adaptive: 1001
Algorithm: Hit-And-Run Metropolis
Covariance Matrix: (NOT SHOWN HERE; diagonal shown instead)
beta[1] beta[2] beta[3] beta[4] log.sigma
0.2268 0.2268 0.2268 0.2268 0.2268
Covariance (Diagonal) History: (NOT SHOWN HERE)
Deviance Information Criterion (DIC):
All Stationary
Dbar 102.2 80.95
pD 2176.5 3.15
DIC 2278.7 84.10
Delayed Rejection (DR): 0
Initial Values:
[1] 2.3822 6.3699 -1.1230 6.4136 -0.6482
Iterations: 1000
Log(Marginal Likelihood): -41.3
Minutes of run-time: 0.12
Model: (NOT SHOWN HERE)
Monitor: (NOT SHOWN HERE)
Parameters (Number of): 5
Periodicity: 1001
Posterior1: (NOT SHOWN HERE)
Posterior2: (NOT SHOWN HERE)
Recommended Burn-In of Thinned Samples: 501
Recommended Burn-In of Un-thinned Samples: 501
Recommended Thinning: 30
Status is displayed every 100 iterations
Summary1: (SHOWN BELOW)
Summary2: (SHOWN BELOW)
Thinned Samples: 1000
Thinning: 1
Summary of All Samples
Mean SD MCSE ESS LB Median UB
beta[1] 4.9139 0.4692 0.1278 26.164 3.3052 5.0075 5.3267
beta[2] 2.0763 1.3842 0.4497 3.910 1.0526 1.5503 6.4136
beta[3] 0.3339 0.4653 0.1447 6.648 -0.8774 0.4382 0.9443
beta[4] 1.3533 1.2160 0.3812 5.643 0.5143 0.9640 5.7560
log.sigma -0.1919 0.4504 0.1443 4.535 -0.5543 -0.3519 1.2435
Deviance 102.2171 65.9773 16.7073 18.447 77.8583 82.2143 215.4516
LP -72.2895 32.9969 8.3570 18.435 -128.9449 -62.2828 -60.1057
sigma 0.9483 0.6917 0.2174 9.705 0.5745 0.7033 3.4678
Summary of Stationary Samples
Mean SD MCSE ESS LB Median UB
beta[1] 5.0225 0.08604 0.02177 26.749 4.8538 5.0270 5.1627
beta[2] 1.4178 0.21248 0.08174 8.359 0.8241 1.4431 1.8226
beta[3] 0.4887 0.13452 0.03676 17.814 0.2806 0.4382 0.8093
beta[4] 1.0456 0.22370 0.07473 9.884 0.5978 1.0045 1.5392
log.sigma -0.3788 0.10635 0.03493 16.242 -0.5543 -0.3949 -0.1683
Deviance 80.9499 2.50984 0.94520 5.256 77.8589 80.8718 86.6694
LP -61.6518 1.25481 8.35703 5.253 -64.5115 -61.6127 -60.1060
sigma 0.6885 0.07435 0.21740 16.008 0.5745 0.6730 0.8451
print(Fit)
(output identical to Fit above; NOT SHOWN HERE)
Consort(Fit)
#############################################################
# Consort with Laplace's Demon #
#############################################################
(Consort first re-prints the Fit summary shown above; NOT SHOWN HERE)
Demonic Suggestion
Due to the combination of the following conditions,
1. Hit-And-Run Metropolis
2. The acceptance rate (0.268) is within the interval [0.15,0.5].
3. At least one target MCSE is >= 6.27% of its marginal posterior
standard deviation.
4. At least one target distribution has an effective sample size
(ESS) less than 100.
5. Each target distribution became stationary by
501 iterations.
Laplace's Demon has not been appeased, and suggests
copy/pasting the following R code into the R console,
and running it.
Initial.Values <- as.initial.values(Fit)
Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
Covar=NULL, Iterations=30000, Status=8333, Thinning=30,
Algorithm="HARM", Specs=NULL)
Laplace's Demon is finished consorting.
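### as.initial.values(Fit) extracts the most recent posterior samples, so
### the suggested run resumes where this one stopped. Iterations=30000
### with Thinning=30 follows the recommended thinning and again retains
### 1000 thinned samples.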
PosteriorChecks(Fit)
$Posterior.Correlation
beta[1] beta[2] beta[3] beta[4] log.sigma LP sigma
beta[1] 1.000000 -0.06855 -0.005885 0.2782 0.17386 0.2304 0.17316
beta[2] -0.068545 1.00000 -0.154074 -0.1803 0.02199 0.4162 0.01751
beta[3] -0.005885 -0.15407 1.000000 -0.0966 -0.04082 0.0131 -0.03442
beta[4] 0.278237 -0.18032 -0.096603 1.0000 0.32312 -0.1128 0.30808
log.sigma 0.173862 0.02199 -0.040816 0.3231 1.00000 -0.2536 0.93540
LP 0.230409 0.41622 0.013099 -0.1128 -0.25360 1.0000 -0.29140
sigma 0.173164 0.01751 -0.034424 0.3081 0.93540 -0.2914 1.00000
$Posterior.Summary
p(theta > 0) N.Modes Kurtosis Skewness Burn-In IAT
beta[1] 1 2 -0.376 0.029 1 16.55
beta[2] 1 2 0.874 -0.478 1 72.34
beta[3] 1 2 -0.049 0.436 1 19.16
beta[4] 1 2 0.160 0.130 1 31.05
log.sigma 0 3 -0.685 0.279 1 31.31
LP 0 2 0.301 -0.747 1 51.98
sigma 1 2 -0.546 0.478 1 32.63
attr(,"class")
[1] "posteriorchecks"
caterpillar.plot(Fit, Parms="beta")
BurnIn <- Fit$Rec.BurnIn.Thinned
plot(Fit, BurnIn, MyData, PDF=FALSE)
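### caterpillar.plot shows posterior medians and intervals for the beta
### parameters; plot() on a demonoid object shows trace plots, densities,
### and autocorrelation plots for the samples after BurnIn.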
Pred <- predict(Fit, Model, MyData)
summary(Pred, Discrep="Chi-Square")
Bayesian Predictive Information Criterion:
Dbar pD BPIC
80.94 3.154 87.25
Concordance: 0.9487
Discrepancy Statistic: 31.65
L-criterion: 35.72, S.L: 0.322
Records:
y Mean SD LB Median UB PQ Discrep
1 4.174 4.308 0.721 2.881 4.312 5.833 0.576 0.034
2 5.361 5.412 0.719 3.935 5.435 6.752 0.526 0.005
3 6.089 5.160 0.738 3.747 5.171 6.590 0.100 1.584
4 5.298 5.145 0.736 3.815 5.126 6.596 0.412 0.044
5 4.407 4.230 0.685 2.929 4.226 5.553 0.404 0.067
6 2.197 3.721 0.719 2.321 3.735 5.077 0.980 4.493
7 5.011 4.391 0.743 2.845 4.414 5.844 0.186 0.695
8 1.609 3.839 0.721 2.577 3.814 5.288 1.000 9.563
9 4.344 4.382 0.693 3.036 4.389 5.691 0.526 0.003
10 4.812 4.521 0.666 3.245 4.473 5.874 0.330 0.192
11 4.190 4.308 0.681 3.037 4.315 5.684 0.562 0.030
12 4.920 4.364 0.679 3.041 4.344 5.733 0.192 0.671
13 4.754 4.194 0.681 2.927 4.170 5.494 0.198 0.676
14 4.127 4.208 0.726 2.911 4.184 5.668 0.528 0.012
15 3.714 3.965 0.733 2.490 3.985 5.348 0.642 0.118
16 4.673 4.475 0.687 3.077 4.501 5.747 0.404 0.083
17 6.930 7.122 0.711 5.800 7.164 8.417 0.614 0.073
18 5.069 4.585 0.685 3.163 4.616 5.827 0.248 0.498
19 6.775 6.460 0.725 5.149 6.415 8.016 0.292 0.189
20 6.554 6.759 0.759 5.280 6.825 8.184 0.628 0.073
21 4.890 4.830 0.675 3.601 4.831 6.181 0.460 0.008
22 4.443 4.443 0.698 3.004 4.464 5.710 0.512 0.000
23 2.833 4.114 0.697 2.798 4.117 5.529 0.972 3.382
24 4.787 4.614 0.672 3.217 4.640 5.867 0.428 0.067
25 6.933 7.210 0.707 5.786 7.186 8.587 0.634 0.153
26 6.180 6.263 0.707 4.904 6.255 7.615 0.544 0.014
27 5.652 5.103 0.709 3.696 5.086 6.520 0.220 0.600
28 5.429 4.535 0.729 3.039 4.583 5.922 0.096 1.508
29 5.635 6.038 0.844 4.405 6.052 7.653 0.680 0.229
30 4.263 3.993 0.732 2.557 3.956 5.406 0.360 0.135
31 3.892 4.245 0.718 2.811 4.262 5.687 0.682 0.242
32 6.613 6.522 0.786 5.049 6.562 7.935 0.470 0.013
33 4.920 4.254 0.714 2.942 4.243 5.712 0.166 0.871
34 6.541 6.120 0.762 4.779 6.135 7.589 0.302 0.306
35 6.346 5.857 0.737 4.264 5.879 7.166 0.244 0.441
36 3.738 4.332 0.733 2.995 4.285 5.855 0.808 0.658
37 7.356 8.183 0.806 6.454 8.210 9.773 0.858 1.051
38 5.740 4.724 0.708 3.271 4.716 6.045 0.088 2.061
39 5.517 4.876 0.715 3.528 4.919 6.326 0.168 0.804
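### PQ gives each observed y's predictive quantile within its posterior
### predictive distribution; values near 0 or 1 flag poorly predicted
### records (e.g., records 6, 8, and 23 above), as do large values of
### the Chi-Square discrepancy statistic (Discrep).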
plot(Pred, Style="Covariates", Data=MyData)
plot(Pred, Style="Density", Rows=1:9)
plot(Pred, Style="ECDF")
plot(Pred, Style="Fitted")
plot(Pred, Style="Jarque-Bera")
plot(Pred, Style="Predictive Quantiles")
plot(Pred, Style="Residual Density")
plot(Pred, Style="Residuals")
Levene.Test(Pred)
[1] 0.564
Importance(Fit, Model, MyData, Discrep="Chi-Square")
X has 4 variables
Estimating the full model...
Estimating without X[,1]...
Estimating without X[,2]...
Estimating without X[,3]...
Estimating without X[,4]...
S.L: 0.32 0.657 0.507 0.361 0.433
BPIC Concordance Discrep L-criterion
Full 87.25 0.949 34.54 35.64
X[,-1] 202015.01 0.000 2021.01 198.60
X[,-2] 234.55 0.872 72.32 42.80
X[,-3] 100.22 0.923 37.30 36.02
X[,-4] 127.62 0.897 50.88 38.82
attr(,"S.L")
[1] 0.320 0.657 0.507 0.361 0.433
attr(,"class")
[1] "importance"
################## Adaptive Hamiltonian Monte Carlo #####################
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="AHMC", Specs=list(epsilon=rep(0.02, length(Initial.Values)),
# L=2, Periodicity=10))
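### In AHMC, epsilon is the vector of leapfrog step sizes (one per
### parameter), L is the number of leapfrog steps per iteration, and
### Periodicity is how often epsilon is adapted.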
########################## Adaptive Metropolis ##########################
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="AM", Specs=list(Adaptive=500, Periodicity=10))
################### Adaptive Metropolis-within-Gibbs ####################
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="AMWG", Specs=list(Periodicity=50))
###################### Adaptive-Mixture Metropolis ######################
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="AMM", Specs=list(Adaptive=500, Periodicity=10, w=0.05))
################# Componentwise Hit-And-Run Metropolis ##################
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="CHARM", Specs=NULL)
################# Delayed Rejection Adaptive Metropolis #################
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="DRAM", Specs=list(Adaptive=500, Periodicity=10))
##################### Delayed Rejection Metropolis ######################
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="DRM", Specs=NULL)
####################### Hamiltonian Monte Carlo #########################
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="HMC", Specs=list(epsilon=rep(0.02, length(Initial.Values)),
# L=2))
############# Hamiltonian Monte Carlo with Dual-Averaging ###############
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="HMCDA", Specs=list(A=500, delta=0.65, epsilon=NULL,
# Lmax=1000, lambda=0.1))
######################## Independence Metropolis ########################
### Note: the mu and Covar arguments should be populated from a previous
### fit; here they are taken from the HARM Fit above, though a Laplace
### Approximation would also serve.
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=Fit$Covar, Iterations=1000, Status=100, Thinning=1,
# Algorithm="IM", Specs=list(mu=Fit$Summary1[,1]))
####################### Metropolis-within-Gibbs #########################
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="MWG", Specs=NULL)
########################## No-U-Turn Sampler ############################
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=100, Status=10, Thinning=1,
# Algorithm="NUTS", Specs=list(A=50, delta=0.6, epsilon=NULL))
###################### Robust Adaptive Metropolis #######################
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="RAM", Specs=list(alpha.star=0.234, Dist="N", gamma=0.66,
# Periodicity=10))
########################### Reversible-Jump #############################
#bin.n <- J-1
#bin.p <- 0.2
#parm.p <- c(1, rep(1/(J-1),(J-1)), 1)
#selectable <- c(0, rep(1,J-1), 0)
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="RJ", Specs=list(bin.n=bin.n, bin.p=bin.p,
# parm.p=parm.p, selectable=selectable,
# selected=c(0,rep(1,J-1),0)))
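### As a reading of these Specs: selectable marks which parameters may
### enter or leave the model (the J-1 predictors; the intercept and
### log.sigma are always retained), bin.n and bin.p parameterize a
### binomial prior on model size, and parm.p gives each parameter's
### prior probability of inclusion.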
######################## Random-Walk Metropolis #########################
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="RWM", Specs=NULL)
############## Sequential Adaptive Metropolis-within-Gibbs ##############
#NOTE: The SAMWG algorithm is only for state-space models (SSMs)
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="SAMWG", Specs=list(Dyn=Dyn, Periodicity=50))
################## Sequential Metropolis-within-Gibbs ###################
#NOTE: The SMWG algorithm is only for state-space models (SSMs)
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="SMWG", Specs=list(Dyn=Dyn))
################### Tempered Hamiltonian Monte Carlo ####################
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="THMC", Specs=list(epsilon=rep(0.05,length(Initial.Values)),
# L=2, Temperature=2))
############################### t-walk #################################
#Fit <- LaplacesDemon(Model, Data=MyData, Initial.Values,
# Covar=NULL, Iterations=1000, Status=100, Thinning=1,
# Algorithm="twalk", Specs=list(SIV=NULL, n1=4, at=6, aw=1.5))
########## Updating Sequential Adaptive Metropolis-within-Gibbs #########
#NOTE: The USAMWG algorithm is only for state-space model updating
#Fit <- LaplacesDemon(Model, MyData, Initial.Values,
# Covar=NULL, Iterations=100000, Status=100, Thinning=100,
# Algorithm="USAMWG", Specs=list(Dyn=Dyn, Periodicity=50, Fit=Fit,
# Begin=T.m))
############## Updating Sequential Metropolis-within-Gibbs ##############
#NOTE: The USMWG algorithm is only for state-space model updating
#Fit <- LaplacesDemon(Model, MyData, Initial.Values,
# Covar=NULL, Iterations=100000, Status=100, Thinning=100,
# Algorithm="USMWG", Specs=list(Dyn=Dyn, Fit=Fit, Begin=T.m))
#End