Loading the packages
library("vars")
library("ggplot2")
library("mvtsplot")
library("MSBVAR")
## Warning: package 'MSBVAR' was built under R version 3.1.1
The MSBVAR package makes it possible to estimate Bayesian VARs with Markov switching. The bvarr package implements six different assumptions about the prior distribution and is an R translation of the Matlab code by Koop and Korobilis. For now, bvarr has to be installed from GitHub:
library("devtools")
install_github("bdemeshev/bvarr")
Once installed, it is loaded in the usual way:
library("bvarr")
Drawing an unusual plot for multivariate time series
data(Canada)
mvtsplot(Canada)
plot(Canada)
help(Canada)
data(IsraelPalestineConflict)
mvtsplot(IsraelPalestineConflict)
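The same kind of multivariate series can also be drawn with ggplot2, which is already loaded. A minimal sketch (not part of the original session) that reshapes the Canada data into long format with base R and draws one facet per series:
# reshape the mts object into long format without extra dependencies
canada_long <- data.frame(
  time   = rep(as.numeric(time(Canada)), times = ncol(Canada)),
  series = rep(colnames(Canada), each = nrow(Canada)),
  value  = as.vector(Canada)
)
ggplot(canada_long, aes(x = time, y = value)) +
  geom_line() +
  facet_wrap(~ series, scales = "free_y")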
Estimating a Bayesian VAR
# Sims-Zha prior: lambda0 overall tightness, lambda1 tightness on own lags,
# lambda3 lag decay, lambda4/lambda5 tightness on the constant and exogenous terms,
# mu5/mu6 sum-of-coefficients and dummy-initial-observation weights,
# nu prior degrees of freedom, qm the data frequency indicator.
fit.BVAR <- szbvar(IsraelPalestineConflict, p = 6,
                   lambda0 = 0.6, lambda1 = 0.1,
                   lambda3 = 2, lambda4 = 0.25, lambda5 = 0, mu5 = 0,
                   mu6 = 0, nu = 3, qm = 4, posterior.fit = FALSE)
summary(fit.BVAR)
## ------------------------------------------
## Sims-Zha Prior reduced form Bayesian VAR
## ------------------------------------------
## Prior form : Normal-inverse Wishart
## Prior hyperparameters :
## lambda0 = 0.6
## lambda1 = 0.1
## lambda3 = 2
## lambda4 = 0.25
## lambda5 = 0
## mu5 = 0
## mu6 = 0
## nu = 3
## ------------------------------------------
## Number of observations : 1275
## Degrees of freedom per equation : 1262
## ------------------------------------------
## Posterior Regression Coefficients :
## ------------------------------------------
## Autoregressive matrices:
## B(1)
## [,1] [,2]
## [1,] 0.6149 0.08589
## [2,] 0.2445 0.52433
##
## B(2)
## [,1] [,2]
## [1,] 0.01507 0.01402
## [2,] 0.10350 0.06058
##
## B(3)
## [,1] [,2]
## [1,] 0.0109 0.006883
## [2,] 0.0312 0.014888
##
## B(4)
## [,1] [,2]
## [1,] 0.005543 0.001732
## [2,] 0.011299 0.004835
##
## B(5)
## [,1] [,2]
## [1,] 0.001618 0.001180
## [2,] 0.003517 0.001799
##
## B(6)
## [,1] [,2]
## [1,] 0.001019 0.000493
## [2,] 0.001895 0.001063
##
## ------------------------------------------
## Constants
## -8.52 -2.633
## ------------------------------------------
## ------------------------------------------
## Posterior error covariance
## [,1] [,2]
## [1,] 4519 1292
## [2,] 1292 1154
##
## ------------------------------------------
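Besides impulse responses, the fitted object can also be used for out-of-sample forecasts. A minimal sketch, assuming that MSBVAR's forecast() takes the fitted BVAR object and a horizon via nsteps (check help(forecast, package = "MSBVAR") for the exact arguments; the horizon of 12 is arbitrary):
# the nsteps argument name is an assumption, by analogy with mc.irf()
forecast.BVAR <- forecast(fit.BVAR, nsteps = 12)
str(forecast.BVAR)  # inspect what the forecast object contains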
posterior.impulses <- mc.irf(fit.BVAR, nsteps=10, draws=5000)
## Monte Carlo IRF Iteration = 1000
## Monte Carlo IRF Iteration = 2000
## Monte Carlo IRF Iteration = 3000
## Monte Carlo IRF Iteration = 4000
## Monte Carlo IRF Iteration = 5000
plot(posterior.impulses)
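plot() draws the default error bands, but the draws can also be summarised by hand. A sketch under the assumption that mc.irf() returns an array with dimensions draws x horizon x (number of impulse-response pairs), which is how the draws are usually stored:
dim(posterior.impulses)
# posterior median and a 90% band for each impulse response,
# computed directly from the Monte Carlo draws (array layout is an assumption)
irf.bands <- apply(posterior.impulses, c(2, 3), quantile,
                   probs = c(0.05, 0.5, 0.95))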
Estimating a Bayesian SVAR
data(BCFdata)
head(Y)
## [1] -2.5806 -2.8823 -1.7428 -0.3978 -0.9772 -0.9365
head(z2)
## [1] 0 0 0 0 0 0
m <- ncol(Y)
# identification of A0: entries equal to 1 are free contemporaneous coefficients,
# zeros are exclusion restrictions (compare with the "A0 restriction matrix"
# printed by summary() below)
ident <- diag(m)
ident[1, ] <- 1
ident[2, 1] <- 1
mvtsplot(Y)
# estimate the model's posterior moments
model <- szbsvar(Y, p=5, z=z2, lambda0=0.8, lambda1=0.1,
lambda3=1, lambda4=0.1, lambda5=0.05,
mu5=0, mu6=5, ident, qm=12)
## Estimating starting values for the numerical optimization
## of the log posterior of A(0)
## Estimating the final values for the numerical optimization
## of the log posterior of A(0)
## initial value 2.762930
## final value 2.762930
## converged
summary(model)
## ------------------------------------------
## A0 restriction matrix
## ------------------------------------------
## [,1] [,2] [,3]
## [1,] 1 1 1
## [2,] 1 1 0
## [3,] 0 0 1
##
## ------------------------------------------
## Sims-Zha Prior Bayesian Structural VAR
## ------------------------------------------
## Prior form : Sims-Zha
## Prior hyperparameters :
## lambda0 = 0.8
## lambda1 = 0.1
## lambda3 = 1
## lambda4 = 0.1
## lambda5 = 0.05
## mu5 = 0
## mu6 = 5
## nu = 4
## ------------------------------------------
## Number of observations : 116
## Degrees of freedom per equation :
## ------------------------------------------
## Posterior Regression Coefficients :
## ------------------------------------------
## Reduced Form Autoregressive matrices:
## B(1)
## [,1] [,2] [,3]
## [1,] 0.790285 -0.01999 -0.06799
## [2,] -0.032992 0.75380 -0.04276
## [3,] 0.007008 0.01197 0.84738
##
## B(2)
## [,1] [,2] [,3]
## [1,] 0.03430 0.022477 0.02073
## [2,] -0.05570 -0.046390 -0.04428
## [3,] 0.03249 0.002655 0.07767
##
## B(3)
## [,1] [,2] [,3]
## [1,] 0.02853 0.05165 -0.003116
## [2,] 0.07793 0.08867 0.004632
## [3,] -0.02803 -0.01341 0.012637
##
## B(4)
## [,1] [,2] [,3]
## [1,] 0.009182 -0.017264 -0.027171
## [2,] -0.006181 -0.018474 -0.055399
## [3,] -0.006835 -0.009102 0.005352
##
## B(5)
## [,1] [,2] [,3]
## [1,] -0.001115 0.007283 0.00670
## [2,] -0.000493 -0.003106 -0.02352
## [3,] -0.009684 0.003604 0.04347
##
## ------------------------------------------
## Reduced Form Constants
## -0.008907 -0.008216 0.01507
## ------------------------------------------
## ------------------------------------------
## Reduced Form Exogenous coefficients
## [,1] [,2] [,3]
## [1,] -0.008007 -0.0122298 -0.0005453
## [2,] -0.003951 0.0018615 -0.0071597
## [3,] 0.004919 0.0030684 -0.0020537
## [4,] -0.005048 -0.0072194 -0.0053027
## [5,] -0.002997 -0.0005685 -0.0039403
## [6,] -0.001559 0.0054264 0.0298527
## [7,] -0.001807 -0.0021317 -0.0112649
## [8,] -0.045237 -0.0180730 -0.0073190
## [9,] -0.015128 -0.0042850 0.0026818
##
## ------------------------------------------
## Structural Autoregressive matrices:
## A(1)
## [,1] [,2] [,3]
## [1,] 0.62421 -0.07273 0.004783
## [2,] 0.02564 -0.04104 0.025448
## [3,] 0.01929 0.05593 -0.021266
##
## A(2)
## [,1] [,2] [,3]
## [1,] 0.008308 -0.003727 -0.004824
## [2,] -0.001330 -0.000196 -0.007857
## [3,] 0.240773 -0.805187 -0.010689
##
## A(3)
## [,1] [,2] [,3]
## [1,] -0.01420 0.03350 0.006228
## [2,] -0.04661 -0.07198 0.006364
## [3,] 0.02078 0.01779 0.007711
##
## A(4)
## [,1] [,2] [,3]
## [1,] -0.008001 0.003143 -0.006497
## [2,] -0.022524 -0.014878 0.293957
## [3,] 0.007236 -0.015433 0.026985
##
## A(5)
## [,1] [,2] [,3]
## [1,] -0.001043 0.001711 0.004346
## [2,] -0.009413 -0.019226 0.001847
## [3,] 0.002323 -0.008159 0.015066
##
## ------------------------------------------
## Structural Constants
## -0.006512 0.0062 0.005215
## ------------------------------------------
## ------------------------------------------
## Structural Exogenous coefficients
## [,1] [,2] [,3]
## [1,] -0.005554 0.0106891 -0.0001999
## [2,] -0.003230 -0.0030638 -0.0024889
## [3,] 0.003688 -0.0018729 -0.0007058
## [4,] -0.003532 0.0062204 -0.0018462
## [5,] -0.002327 -0.0002328 -0.0013709
## [6,] -0.001566 -0.0061637 0.0103535
## [7,] -0.001292 0.0017488 -0.0039101
## [8,] -0.034539 0.0065114 -0.0025995
## [9,] -0.011660 0.0003200 0.0009100
##
## ------------------------------------------
## ------------------------------------------
## Posterior mode of the A0 matrix
## [,1] [,2] [,3]
## [1,] 0.78829 0.278 0.001341
## [2,] -0.06199 -1.056 0.000000
## [3,] 0.00000 0.000 0.346889
##
## ------------------------------------------
Constructing the impulse responses requires one extra step: simulating the posterior of A0.
A0.post <- gibbs.A0(model,N1=1000,N2=5000)
## Normalization Method: DistanceMLA ( 0 )
## Gibbs Burn-in 10 % Complete
## Gibbs Burn-in 20 % Complete
## Gibbs Burn-in 30 % Complete
## Gibbs Burn-in 40 % Complete
## Gibbs Burn-in 50 % Complete
## Gibbs Burn-in 60 % Complete
## Gibbs Burn-in 70 % Complete
## Gibbs Burn-in 80 % Complete
## Gibbs Burn-in 90 % Complete
## Gibbs Burn-in 100 % Complete
## Gibbs Sampling 10 % Complete (500 draws)
## A0 log-det = 1.265853
## Gibbs Sampling 20 % Complete (1000 draws)
## A0 log-det = 1.258398
## Gibbs Sampling 30 % Complete (1500 draws)
## A0 log-det = 1.316095
## Gibbs Sampling 40 % Complete (2000 draws)
## A0 log-det = 1.202206
## Gibbs Sampling 50 % Complete (2500 draws)
## A0 log-det = 1.316423
## Gibbs Sampling 60 % Complete (3000 draws)
## A0 log-det = 1.403465
## Gibbs Sampling 70 % Complete (3500 draws)
## A0 log-det = 1.219451
## Gibbs Sampling 80 % Complete (4000 draws)
## A0 log-det = 1.395238
## Gibbs Sampling 90 % Complete (4500 draws)
## A0 log-det = 1.481735
## Gibbs Sampling 100 % Complete (5000 draws)
## A0 log-det = 1.279583
model.irfs <- mc.irf(model,nsteps=10,draws=5000,A0.posterior=A0.post)
## Monte Carlo IRF 10 % Complete
## Monte Carlo IRF 20 % Complete
## Monte Carlo IRF 30 % Complete
## Monte Carlo IRF 40 % Complete
## Monte Carlo IRF 50 % Complete
## Monte Carlo IRF 60 % Complete
## Monte Carlo IRF 70 % Complete
## Monte Carlo IRF 80 % Complete
## Monte Carlo IRF 90 % Complete
## Monte Carlo IRF 100 % Complete
plot(model.irfs)
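Convergence of the A0 sampler can be checked with coda. A sketch under the assumption that the converter from gibbs.A0() output to an mcmc object is called A02mcmc() (check the MSBVAR package index for the exact name):
library("coda")
A0.mcmc <- A02mcmc(A0.post)  # assumed converter name
summary(A0.mcmc)
plot(A0.mcmc)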
Let's estimate one of the six models from Koop and Korobilis
model <- bvar(Y, prior = "independent")
## Iteration 2000 out of 12000
## Iteration 4000 out of 12000
## Iteration 6000 out of 12000
## Iteration 8000 out of 12000
## Iteration 10000 out of 12000
## Iteration 12000 out of 12000
bvar.imp.plot(model)
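The components stored in the fitted object differ across the six priors. A generic way to see what is inside, using only base R:
str(model, max.level = 1)  # list the top-level components of the fitted model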
Further reading:
A superb vignette of the now-archived MSBVAR package. Look for download, then link.
The original paper by Sims and Zha.
I am interested in developing Bayesian time-series algorithms in R. If you would like to write a thesis, are not afraid of coding in R or C, and are comfortable with probability theory, you are welcome!