Econ 5316 Time Series Econometrics (Spring 2017) / Homework 6, Problem 1, Question (e) / Keunyoung (Kay) Kim
This augmented vector autoregression (VAR) model adds a third variable, the Leading Index for the United States (FRED/USSLIND).
To do so, we extend the R code and re-estimate the VAR model. Please refer to eco5316sp2017_HW6P1_e.Rmd for the R code and eco5316sp2017_HW6P1_e.pdf for the results.
We re-estimate our VAR with the Leading Index for the United States (FRED/USSLIND) as the third variable. As shown in the graph below, the Leading Index has no trend, so we do not take its log difference and use it in levels. We then follow the same steps as in the previous bivariate VAR model: the lag order p = 1 minimizes the AIC, so we estimate a trivariate VAR(1).
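The selection and estimation output below is generated by calls along these lines; a minimal sketch, assuming y1, y2, and y3 are the transformed series constructed in the appended code (see eco5316sp2017_HW6P1_e.Rmd for the full script):

library(vars)
# y3 is the Leading Index in levels; y1 and y2 are annualized log changes
y.Q <- cbind(y1, y2, y3)
# information criteria for lag orders 1 to 8 - all four select p = 1
VARselect(y.Q, lag.max=8, type="const")
# reduced-form VAR(1), lag order chosen by AIC
varp <- VAR(y.Q, ic="AIC", lag.max=9, type="const")
summary(varp)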
## $selection
## AIC(n) HQ(n) SC(n) FPE(n)
## 1 1 1 1
##
## $criteria
## 1 2 3 4 5 6
## AIC(n) 6.258899 6.285020 6.340281 6.444345 6.533676 6.582492
## HQ(n) 6.367536 6.475135 6.611874 6.797415 6.968224 7.098517
## SC(n) 6.526277 6.752931 7.008726 7.313323 7.603188 7.852537
## FPE(n) 522.675274 536.650773 567.498819 630.462156 690.640684 727.141389
## 7 8
## AIC(n) 6.521041 6.510233
## HQ(n) 7.118544 7.189214
## SC(n) 7.991619 8.181344
## FPE(n) 686.344250 682.310184
##
## VAR Estimation Results:
## =======================
##
## Estimated coefficients for equation y1:
## =======================================
## Call:
## y1 = y1.l1 + y2.l1 + y3.l1 + const
##
## y1.l1 y2.l1 y3.l1 const
## 0.138590810 0.009981672 1.321740001 0.552898297
##
##
## Estimated coefficients for equation y2:
## =======================================
## Call:
## y2 = y1.l1 + y2.l1 + y3.l1 + const
##
## y1.l1 y2.l1 y3.l1 const
## 0.06221355 0.02722750 4.09985427 -0.13383236
##
##
## Estimated coefficients for equation y3:
## =======================================
## Call:
## y3 = y1.l1 + y2.l1 + y3.l1 + const
##
## y1.l1 y2.l1 y3.l1 const
## 0.041389804 0.002962007 0.738707206 0.216540341
##
## VAR Estimation Results:
## =========================
## Endogenous variables: y1, y2, y3
## Deterministic variables: const
## Sample size: 135
## Log Likelihood: -984.952
## Roots of the characteristic polynomial:
## 0.8357 0.03617 0.03617
## Call:
## VAR(y = y.Q, type = "const", lag.max = 9, ic = "AIC")
##
##
## Estimation results for equation y1:
## ===================================
## y1 = y1.l1 + y2.l1 + y3.l1 + const
##
## Estimate Std. Error t value Pr(>|t|)
## y1.l1 0.138591 0.103130 1.344 0.1813
## y2.l1 0.009982 0.005736 1.740 0.0842 .
## y3.l1 1.321740 0.312991 4.223 4.48e-05 ***
## const 0.552898 0.338721 1.632 0.1050
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
##
## Residual standard error: 2.048 on 131 degrees of freedom
## Multiple R-Squared: 0.3495, Adjusted R-squared: 0.3346
## F-statistic: 23.46 on 3 and 131 DF, p-value: 3.218e-12
##
##
## Estimation results for equation y2:
## ===================================
## y2 = y1.l1 + y2.l1 + y3.l1 + const
##
## Estimate Std. Error t value Pr(>|t|)
## y1.l1 0.06221 1.60616 0.039 0.969
## y2.l1 0.02723 0.08933 0.305 0.761
## y3.l1 4.09985 4.87458 0.841 0.402
## const -0.13383 5.27531 -0.025 0.980
##
##
## Residual standard error: 31.89 on 131 degrees of freedom
## Multiple R-Squared: 0.01448, Adjusted R-squared: -0.00809
## F-statistic: 0.6416 on 3 and 131 DF, p-value: 0.5896
##
##
## Estimation results for equation y3:
## ===================================
## y3 = y1.l1 + y2.l1 + y3.l1 + const
##
## Estimate Std. Error t value Pr(>|t|)
## y1.l1 0.041390 0.020881 1.982 0.04955 *
## y2.l1 0.002962 0.001161 2.550 0.01191 *
## y3.l1 0.738707 0.063371 11.657 < 2e-16 ***
## const 0.216540 0.068580 3.157 0.00198 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
##
## Residual standard error: 0.4146 on 131 degrees of freedom
## Multiple R-Squared: 0.7563, Adjusted R-squared: 0.7508
## F-statistic: 135.6 on 3 and 131 DF, p-value: < 2.2e-16
##
##
##
## Covariance matrix of residuals:
## y1 y2 y3
## y1 4.1933 12.358 0.4642
## y2 12.3581 1017.120 3.4457
## y3 0.4642 3.446 0.1719
##
## Correlation matrix of residuals:
## y1 y2 y3
## y1 1.0000 0.1892 0.5468
## y2 0.1892 1.0000 0.2606
## y3 0.5468 0.2606 1.0000
##
## ============================================================
## Dependent variable:
## -----------------------------
## (1) (2) (3)
## ------------------------------------------------------------
## y1.l1 0.139 0.062 0.041**
## (0.103) (1.606) (0.021)
##
## y2.l1 0.010* 0.027 0.003**
## (0.006) (0.089) (0.001)
##
## y3.l1 1.322*** 4.100 0.739***
## (0.313) (4.875) (0.063)
##
## const 0.553 -0.134 0.217***
## (0.339) (5.275) (0.069)
##
## ------------------------------------------------------------
## Observations 135 135 135
## R2 0.350 0.014 0.756
## Adjusted R2 0.335 -0.008 0.751
## Residual Std. Error (df = 131) 2.048 31.892 0.415
## F Statistic (df = 3; 131) 23.463*** 0.642 135.550***
## ============================================================
## Note: *p<0.1; **p<0.05; ***p<0.01
## $Granger
##
## Granger causality H0: y1 do not Granger-cause y2 y3
##
## data: VAR object varp
## F-Test = 2.0871, df1 = 2, df2 = 393, p-value = 0.1254
##
##
## $Instant
##
## H0: No instantaneous causality between: y1 and y2 y3
##
## data: VAR object varp
## Chi-squared = 31.26, df = 2, p-value = 1.629e-07
## $Granger
##
## Granger causality H0: y2 do not Granger-cause y1 y3
##
## data: VAR object varp
## F-Test = 3.3376, df1 = 2, df2 = 393, p-value = 0.03653
##
##
## $Instant
##
## H0: No instantaneous causality between: y2 and y1 y3
##
## data: VAR object varp
## Chi-squared = 8.9522, df = 2, p-value = 0.01138
## $Granger
##
## Granger causality H0: y3 do not Granger-cause y1 y2
##
## data: VAR object varp
## F-Test = 8.9175, df1 = 2, df2 = 393, p-value = 0.0001631
##
##
## $Instant
##
## H0: No instantaneous causality between: y3 and y1 y2
##
## data: VAR object varp
## Chi-squared = 33.082, df = 2, p-value = 6.553e-08
## [,1] [,2] [,3] [,4]
## [1,] 1 0 0 1
## [2,] 0 1 0 1
## [3,] 1 1 1 1
##
## VAR Estimation Results:
## =======================
##
## Estimated coefficients for equation y1:
## =======================================
## Call:
## y1 = y1.l1 + const
##
## y1.l1 const
## 0.4855963 1.4189792
##
##
## Estimated coefficients for equation y2:
## =======================================
## Call:
## y2 = y2.l1 + const
##
## y2.l1 const
## 0.05335709 5.40991509
##
##
## Estimated coefficients for equation y3:
## =======================================
## Call:
## y3 = y1.l1 + y2.l1 + y3.l1 + const
##
## y1.l1 y2.l1 y3.l1 const
## 0.041389804 0.002962007 0.738707206 0.216540341
##
## VAR Estimation Results:
## =========================
## Endogenous variables: y1, y2, y3
## Deterministic variables: const
## Sample size: 135
## Log Likelihood: -1000.473
## Roots of the characteristic polynomial:
## 0.7387 0.4856 0.05336
## Call:
## VAR(y = y.Q, type = "const", lag.max = 9, ic = "AIC")
##
##
## Estimation results for equation y1:
## ===================================
## y1 = y1.l1 + const
##
## Estimate Std. Error t value Pr(>|t|)
## y1.l1 0.48560 0.07549 6.432 2.08e-09 ***
## const 1.41898 0.28403 4.996 1.81e-06 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
##
## Residual standard error: 2.201 on 133 degrees of freedom
## Multiple R-Squared: 0.6589, Adjusted R-squared: 0.6537
## F-statistic: 128.4 on 2 and 133 DF, p-value: < 2.2e-16
##
##
## Estimation results for equation y2:
## ===================================
## y2 = y2.l1 + const
##
## Estimate Std. Error t value Pr(>|t|)
## y2.l1 0.05336 0.08640 0.618 0.5379
## const 5.40992 2.78667 1.941 0.0543 .
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
##
## Residual standard error: 31.84 on 133 degrees of freedom
## Multiple R-Squared: 0.03444, Adjusted R-squared: 0.01992
## F-statistic: 2.372 on 2 and 133 DF, p-value: 0.09725
##
##
## Estimation results for equation y3:
## ===================================
## y3 = y1.l1 + y2.l1 + y3.l1 + const
##
## Estimate Std. Error t value Pr(>|t|)
## y1.l1 0.041390 0.020881 1.982 0.04955 *
## y2.l1 0.002962 0.001161 2.550 0.01191 *
## y3.l1 0.738707 0.063371 11.657 < 2e-16 ***
## const 0.216540 0.068580 3.157 0.00198 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
##
## Residual standard error: 0.4146 on 131 degrees of freedom
## Multiple R-Squared: 0.9331, Adjusted R-squared: 0.9311
## F-statistic: 456.9 on 4 and 131 DF, p-value: < 2.2e-16
##
##
##
## Covariance matrix of residuals:
## y1 y2 y3
## y1 4.9169 13.903 0.4642
## y2 13.9029 1029.113 3.4457
## y3 0.4642 3.446 0.1719
##
## Correlation matrix of residuals:
## y1 y2 y3
## y1 1.0000 0.1954 0.5050
## y2 0.1954 1.0000 0.2591
## y3 0.5050 0.2591 1.0000
## y1.l1 y2.l1 y3.l1 const
## y1 1 0 0 1
## y2 0 1 0 1
## y3 1 1 1 1
## [[1]]
## y1.l1 y2.l1 y3.l1
## y1 0.4855963 0.000000000 0.0000000
## y2 0.0000000 0.053357094 0.0000000
## y3 0.0413898 0.002962007 0.7387072
## $y1
## fcst lower upper CI
## [1,] 2.914898 -1.098650 6.928447 4.013549
## [2,] 2.916433 -1.453332 7.286199 4.369766
## [3,] 2.889091 -1.670823 7.449005 4.559914
## [4,] 2.865204 -1.821104 7.551511 4.686307
## [5,] 2.845206 -1.927326 7.617739 4.772532
## [6,] 2.828493 -2.003349 7.660335 4.831842
## [7,] 2.814525 -2.058315 7.687365 4.872840
## [8,] 2.802851 -2.098421 7.704123 4.901272
## [9,] 2.793094 -2.127939 7.714128 4.921034
## [10,] 2.784941 -2.149849 7.719730 4.934789
## [11,] 2.778126 -2.166248 7.722501 4.944374
## [12,] 2.772431 -2.178627 7.723489 4.951058
##
## $y2
## fcst lower upper CI
## [1,] 6.395342 -56.11246 68.90314 62.50780
## [2,] 6.101906 -56.55039 68.75420 62.65229
## [3,] 6.017625 -56.72673 68.76197 62.74435
## [4,] 5.953898 -56.85567 68.76346 62.80957
## [5,] 5.900890 -56.95425 68.75603 62.85514
## [6,] 5.856597 -57.03036 68.74355 62.88696
## [7,] 5.819580 -57.08959 68.72875 62.90917
## [8,] 5.788643 -57.13603 68.71332 62.92468
## [9,] 5.762788 -57.17272 68.69830 62.93551
## [10,] 5.741180 -57.20189 68.68425 62.94307
## [11,] 5.723121 -57.22523 68.67148 62.94835
## [12,] 5.708029 -57.24402 68.66007 62.95204
##
## $y3
## fcst lower upper CI
## [1,] 1.434261 0.62164581 2.246877 0.8126157
## [2,] 1.415630 0.29260749 2.538652 1.1230222
## [3,] 1.401061 0.10067068 2.701451 1.3003901
## [4,] 1.388917 -0.02234091 2.800175 1.4112581
## [5,] 1.378769 -0.10502552 2.862564 1.4837947
## [6,] 1.370288 -0.16213584 2.902712 1.5324239
## [7,] 1.363200 -0.20229395 2.928694 1.5654940
## [8,] 1.357276 -0.23090763 2.945460 1.5881839
## [9,] 1.352326 -0.25151611 2.956167 1.6038417
## [10,] 1.348188 -0.26649999 2.962876 1.6146880
## [11,] 1.344730 -0.27749069 2.966951 1.6222208
## [12,] 1.341840 -0.28562133 2.969302 1.6274615
Our forecast for the real GDP growth rate in 2017 Q1 is about 2.9% (the first-step point forecast of y1 above is 2.91%).
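The 2017 Q1 point forecast can be read directly from the object returned by predict(); a minimal sketch, assuming varp.f is the forecast object created in the appended code:

varp.f <- predict(varp, n.ahead=12)
# first-step point forecast of y1 (annualized real GDP growth, in percent)
varp.f$fcst$y1[1, "fcst"]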
library(Quandl)
Quandl.api_key("XzdSwkDsE98Mxj3ixQzG")
rgdp.q <- Quandl("FRED/GDPC96", type="zoo")
def.q <- Quandl("FRED/GDPDEF", type="zoo")
sp.q <- Quandl("YAHOO/INDEX_GSPC/CLOSE", collapse="quarterly",type="zoo")
lead.q <- Quandl("FRED/USSLIND", collapse="quarterly",type="zoo")
lead <- lead.q
par(mfrow=c(2,2))
plot(rgdp.q, xlab="", ylab="", main="Real GDP", col="blue")
plot(def.q, xlab="", ylab="", main="GDP Deflator", col="red")
plot(sp.q, xlab="", ylab="", main="S&P 500 index", col="red")
plot(lead, xlab="", ylab="", main="Leading index for US", col="green")
# log changes of real GDP, the GDP deflator, and the S&P 500; the Leading Index is used in levels (no log difference needed)
dl.rgdp <- diff(log(rgdp.q))
dl.def <- diff(log(def.q))
dl.sp <- diff(log(sp.q))
par(mfrow=c(2,2))
plot(dl.rgdp, xlab="", ylab="", main="Real GDP (Diff log)", col="blue")
plot(dl.def, xlab="", ylab="", main="GDP Deflator (Diff log)", col="red")
plot(dl.sp, xlab="", ylab="", main="S&P 500 index (Diff log)", col="red")
plot(lead, xlab="", ylab="", main="Leading index for US", col="green")
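# y1: annualized real GDP growth (400 x quarterly log change)
# y2: annualized real S&P 500 return (400 x quarterly log change, deflated by the GDP deflator)
# y3: Leading Index, kept in levels (no trend, so no log difference)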
y1 <- 400*dl.rgdp
y2 <- 400*(dl.sp - dl.def)
y3 <- lead
y1 <- window(y1, start="1983 Q1", end="2016 Q4")
y2 <- window(y2, start="1983 Q1", end="2016 Q4")
y3 <- window(y3, start="1983 Q1", end="2016 Q4")
par(mfrow=c(1,1))
plot(y1, xlab="", ylab="", main="Growth Rate of Real GDP, S&P 500 Return, and Leading Index", col="blue", ylim=c(-210, 180))
legend("topright", c("GDP Growth Rate", "Adjusted Return of S&P 500", "Leading Index"), bty="n", col=c("blue", "red", "green"), lty=c("solid", "dashed", "dotted"))
lines(y2, col="red", lty="dashed")
lines(y3, col="green", lty="dotted")
# form dataset for VAR model
y.Q <- cbind(y1, y2, y3)
y.Q <- window(y.Q, start="1983 Q1", end="2016 Q4")
# load package that allows to estimate and analyze VAR models
# install.packages("vars")
library(vars)
#Q.a
# selection criteria summary
VARselect(y.Q, lag.max=8, type="const")
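# all four criteria (AIC, HQ, SC, FPE) select p = 1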
# estimate a reduced-form VAR(1): lag order selected by AIC
varp <- VAR(y.Q, ic="AIC", lag.max=9, type="const")
varp
summary(varp)
# using stargazer package to report results of VAR estimation
lmp <- varp$varresult
library(stargazer)
stargazer(lmp$y1, lmp$y2, lmp$y3, type="text", dep.var.labels.include=FALSE)
# Q.b
# Granger causality
causality(varp, cause="y1")
causality(varp, cause="y2")
causality(varp, cause="y3")
# Q.c
# estimate restricted VAR: keep only the own lag in the y1 and y2 equations, leave the y3 equation unrestricted
# define the restriction matrix (rows = equations y1, y2, y3; columns = y1.l1, y2.l1, y3.l1, const; 1 = keep, 0 = drop)
mat.r <- matrix(1, nrow=3, ncol=4)
mat.r[1, c(2,3)] <- 0
mat.r[2, c(1,3)] <- 0
mat.r
varp.r <- restrict(varp, method="manual", resmat=mat.r)
varp.r
summary(varp.r)
varp.r$restrictions
Acoef(varp.r)
# estimate restricted VAR - keep only variables with t-value larger than 2.0
varp.r.ser <- restrict(varp, method="ser", thresh=2.0)
varp.r.ser
summary(varp.r.ser)
varp.r.ser$restrictions
Acoef(varp.r.ser)
# forecasting
varp.f <- predict(varp, n.ahead=12)
varp.f
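# optional check (not part of the original output): plot the 12-step-ahead forecasts with error bands
# plot(varp.f)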