library(Quandl)
## Warning: package 'Quandl' was built under R version 3.3.3
## Loading required package: xts
## Loading required package: zoo
##
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
##
## as.Date, as.Date.numeric
library(zoo)
library(xts)
library(dygraphs)
library(knitr)
library(forecast)
## Loading required package: timeDate
## This is forecast 7.3
library(urca)
library(vars)
## Warning: package 'vars' was built under R version 3.3.3
## Loading required package: MASS
## Loading required package: strucchange
## Warning: package 'strucchange' was built under R version 3.3.3
## Loading required package: sandwich
## Warning: package 'sandwich' was built under R version 3.3.3
## Loading required package: lmtest
## Warning: package 'lmtest' was built under R version 3.3.3
Quandl.api_key('rxVQhZ8_nxxeo2yy4Uz4')
RO <- Quandl("FRED/OPHNFB", type="zoo")
HOP <- Quandl("FRED/HOANBS", type="zoo" )
y1.t <- 100*log(RO)
y2.t <- 100*log(HOP)
par(mfrow=c(1,2), cex=0.6)
plot(y1.t, xlab="", ylab="", main="Log of Nonfarm Business Sector:\nReal Output Per Hour of All Persons")
plot(y2.t, xlab="", ylab="", main="Log of Nonfarm Business Sector:\nHours of All Persons")
y1.ers1 <- ur.ers(y1.t, type="P-test", model="trend")
summary(y1.ers1)
##
## ###############################################
## # Elliot, Rothenberg and Stock Unit Root Test #
## ###############################################
##
## Test of type P-test
## detrending of series with intercept and trend
##
## Value of test-statistic is: 38.597
##
## Critical values of P-test are:
## 1pct 5pct 10pct
## critical values 3.96 5.62 6.89
y2.ers1 <- ur.ers(y2.t, type="P-test", model="trend")
summary(y2.ers1)
##
## ###############################################
## # Elliot, Rothenberg and Stock Unit Root Test #
## ###############################################
##
## Test of type P-test
## detrending of series with intercept and trend
##
## Value of test-statistic is: 6.2319
##
## Critical values of P-test are:
## 1pct 5pct 10pct
## critical values 3.96 5.62 6.89
According to the ERS test, both series in logs are non-stationary: the test statistics (38.60 and 6.23) exceed the 5% critical value of 5.62, so the null of a unit root cannot be rejected at the 5% level.
dy1.t <- diff(y1.t)
dy2.t <- diff(y2.t)
dy1.ers1 <- ur.ers(dy1.t, type="P-test", model="trend")
summary(dy1.ers1)
##
## ###############################################
## # Elliot, Rothenberg and Stock Unit Root Test #
## ###############################################
##
## Test of type P-test
## detrending of series with intercept and trend
##
## Value of test-statistic is: 1.3185
##
## Critical values of P-test are:
## 1pct 5pct 10pct
## critical values 3.96 5.62 6.89
dy2.ers1 <- ur.ers(dy2.t, type="P-test", model="trend")
summary(dy2.ers1)
##
## ###############################################
## # Elliot, Rothenberg and Stock Unit Root Test #
## ###############################################
##
## Test of type P-test
## detrending of series with intercept and trend
##
## Value of test-statistic is: 0.3567
##
## Critical values of P-test are:
## 1pct 5pct 10pct
## critical values 3.96 5.62 6.89
Both series are stationary in first differences: the ERS statistics (1.32 and 0.36) fall below the 1% critical value of 3.96, so the unit-root null is rejected.
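To make the decision rule explicit, the statistics and critical values can also be compared programmatically. A minimal sketch, assuming the urca test objects expose the usual teststat and cval slots:
# ERS P-test: the unit-root null is rejected only when the statistic falls BELOW the critical value
ers.check <- function(test) cbind(statistic = test@teststat, test@cval)
ers.check(y1.ers1)   # 38.60 > 5.62 -> unit root not rejected (log levels)
ers.check(y2.ers1)   #  6.23 > 5.62 -> unit root not rejected (log levels)
ers.check(dy1.ers1)  #  1.32 < 3.96 -> unit root rejected (first differences)
ers.check(dy2.ers1)  #  0.36 < 3.96 -> unit root rejected (first differences)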
y.Q <- cbind(dy1.t, dy2.t)
y.Q <- window(y.Q, start="1947 Q2", end="2016 Q4")
VARselect(y.Q, lag.max=8, type="const")
## $selection
## AIC(n) HQ(n) SC(n) FPE(n)
## 3 2 2 3
##
## $criteria
## 1 2 3 4 5 6
## AIC(n) -1.1343721 -1.2046709 -1.2068439 -1.1847725 -1.1836732 -1.1606766
## HQ(n) -1.1023510 -1.1513022 -1.1321279 -1.0887090 -1.0662622 -1.0219182
## SC(n) -1.0546204 -1.0717513 -1.0207566 -0.9455174 -0.8912503 -0.8150859
## FPE(n) 0.3216246 0.2997932 0.2991468 0.3058307 0.3061794 0.3133203
## 7 8
## AIC(n) -1.1317826 -1.1491281
## HQ(n) -0.9716768 -0.9676748
## SC(n) -0.7330241 -0.6972017
## FPE(n) 0.3225310 0.3170175
Using the AIC (and FPE) criterion we choose 3 lags; HQ and SC would suggest 2.
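The chosen orders can also be pulled out of the VARselect object directly (a small sketch):
# VARselect() returns the selected lag orders as a named vector in $selection
lag.sel <- VARselect(y.Q, lag.max=8, type="const")$selection
lag.sel                              # AIC and FPE pick 3 lags, HQ and SC pick 2
p.aic <- unname(lag.sel["AIC(n)"])   # equals 3, the order used in VAR() below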
var1 <- VAR(y.Q, p=3, type="const")
summary(var1)
##
## VAR Estimation Results:
## =========================
## Endogenous variables: dy1.t, dy2.t
## Deterministic variables: const
## Sample size: 276
## Log Likelihood: -602.811
## Roots of the characteristic polynomial:
## 0.7387 0.7387 0.4311 0.4311 0.4069 0.4069
## Call:
## VAR(y = y.Q, p = 3, type = "const")
##
##
## Estimation results for equation dy1.t:
## ======================================
## dy1.t = dy1.t.l1 + dy2.t.l1 + dy1.t.l2 + dy2.t.l2 + dy1.t.l3 + dy2.t.l3 + const
##
## Estimate Std. Error t value Pr(>|t|)
## dy1.t.l1 -0.06110 0.05921 -1.032 0.30307
## dy2.t.l1 0.06312 0.07128 0.886 0.37667
## dy1.t.l2 0.05149 0.05596 0.920 0.35840
## dy2.t.l2 -0.19658 0.08401 -2.340 0.02002 *
## dy1.t.l3 0.00524 0.05506 0.095 0.92425
## dy2.t.l3 -0.19238 0.07313 -2.631 0.00901 **
## const 0.62337 0.07462 8.353 3.56e-15 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
##
## Residual standard error: 0.793 on 269 degrees of freedom
## Multiple R-Squared: 0.1176, Adjusted R-squared: 0.09788
## F-statistic: 5.973 on 6 and 269 DF, p-value: 7.068e-06
##
##
## Estimation results for equation dy2.t:
## ======================================
## dy2.t = dy1.t.l1 + dy2.t.l1 + dy1.t.l2 + dy2.t.l2 + dy1.t.l3 + dy2.t.l3 + const
##
## Estimate Std. Error t value Pr(>|t|)
## dy1.t.l1 0.10659 0.05068 2.103 0.0364 *
## dy2.t.l1 0.62592 0.06101 10.260 <2e-16 ***
## dy1.t.l2 0.10479 0.04790 2.188 0.0296 *
## dy2.t.l2 -0.01811 0.07190 -0.252 0.8014
## dy1.t.l3 0.08847 0.04713 1.877 0.0616 .
## dy2.t.l3 -0.04412 0.06259 -0.705 0.4815
## const -0.02740 0.06387 -0.429 0.6683
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
##
## Residual standard error: 0.6787 on 269 degrees of freedom
## Multiple R-Squared: 0.4387, Adjusted R-squared: 0.4261
## F-statistic: 35.03 on 6 and 269 DF, p-value: < 2.2e-16
##
##
##
## Covariance matrix of residuals:
## dy1.t dy2.t
## dy1.t 0.62881 0.07009
## dy2.t 0.07009 0.46064
##
## Correlation matrix of residuals:
## dy1.t dy2.t
## dy1.t 1.0000 0.1302
## dy2.t 0.1302 1.0000
Next, we demean the data:
apply(y.Q, 2, mean)
## dy1.t dy2.t
## 0.5273581 0.3047287
y.svar <- sweep(y.Q, 2, apply(y.Q, 2, mean))
apply(y.svar, 2, mean)
## dy1.t dy2.t
## -3.886224e-18 1.758137e-17
plot(y.svar)
Then we estimate the reduced-form VAR:
VARselect(y.svar, lag.max=8)
## $selection
## AIC(n) HQ(n) SC(n) FPE(n)
## 3 2 2 3
##
## $criteria
## 1 2 3 4 5 6
## AIC(n) -1.1343721 -1.2046709 -1.2068439 -1.1847725 -1.1836732 -1.1606766
## HQ(n) -1.1023510 -1.1513022 -1.1321279 -1.0887090 -1.0662622 -1.0219182
## SC(n) -1.0546204 -1.0717513 -1.0207566 -0.9455174 -0.8912503 -0.8150859
## FPE(n) 0.3216246 0.2997932 0.2991468 0.3058307 0.3061794 0.3133203
## 7 8
## AIC(n) -1.1317826 -1.1491281
## HQ(n) -0.9716768 -0.9676748
## SC(n) -0.7330241 -0.6972017
## FPE(n) 0.3225310 0.3170175
myVAR <- VAR(y.svar, ic="SC", lag.max=8)
summary(myVAR)
##
## VAR Estimation Results:
## =========================
## Endogenous variables: dy1.t, dy2.t
## Deterministic variables: const
## Sample size: 277
## Log Likelihood: -617.473
## Roots of the characteristic polynomial:
## 0.6086 0.6086 0.2946 0.2946
## Call:
## VAR(y = y.svar, lag.max = 8, ic = "SC")
##
##
## Estimation results for equation dy1.t:
## ======================================
## dy1.t = dy1.t.l1 + dy2.t.l1 + dy1.t.l2 + dy2.t.l2 + const
##
## Estimate Std. Error t value Pr(>|t|)
## dy1.t.l1 -0.073370 0.057108 -1.285 0.2000
## dy2.t.l1 0.076501 0.073177 1.045 0.2968
## dy1.t.l2 0.103478 0.056191 1.842 0.0666 .
## dy2.t.l2 -0.334295 0.072971 -4.581 7.05e-06 ***
## const 0.006032 0.049350 0.122 0.9028
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
##
## Residual standard error: 0.8213 on 272 degrees of freedom
## Multiple R-Squared: 0.1027, Adjusted R-squared: 0.08949
## F-statistic: 7.782 on 4 and 272 DF, p-value: 5.966e-06
##
##
## Estimation results for equation dy2.t:
## ======================================
## dy2.t = dy1.t.l1 + dy2.t.l1 + dy1.t.l2 + dy2.t.l2 + const
##
## Estimate Std. Error t value Pr(>|t|)
## dy1.t.l1 0.110054 0.047441 2.320 0.0211 *
## dy2.t.l1 0.643941 0.060790 10.593 <2e-16 ***
## dy1.t.l2 0.110611 0.046679 2.370 0.0185 *
## dy2.t.l2 -0.046616 0.060619 -0.769 0.4426
## const 0.001089 0.040997 0.027 0.9788
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
##
## Residual standard error: 0.6823 on 272 degrees of freedom
## Multiple R-Squared: 0.4275, Adjusted R-squared: 0.4191
## F-statistic: 50.78 on 4 and 272 DF, p-value: < 2.2e-16
##
##
##
## Covariance matrix of residuals:
## dy1.t dy2.t
## dy1.t 0.67459 0.08419
## dy2.t 0.08419 0.46554
##
## Correlation matrix of residuals:
## dy1.t dy2.t
## dy1.t 1.0000 0.1502
## dy2.t 0.1502 1.0000
Blanchard-Quah long-run restriction: the row 1, column 2 element of the cumulative (long-run) impact matrix is restricted to 0, so the non-technology shock has no long-run effect on labor productivity.
mySVAR <- BQ(myVAR)
summary(mySVAR)
##
## SVAR Estimation Results:
## ========================
##
## Call:
## BQ(x = myVAR)
##
## Type: Blanchard-Quah
## Sample size: 277
## Log Likelihood: -622.518
##
## Estimated contemporaneous impact matrix:
## dy1.t dy2.t
## dy1.t 0.7131 0.4075
## dy2.t -0.2457 0.6365
##
## Estimated identified long run impact matrix:
## dy1.t dy2.t
## dy1.t 0.7833 0.000
## dy2.t -0.1809 1.581
##
## Covariance matrix of reduced form residuals (*100):
## dy1.t dy2.t
## dy1.t 67.459 8.419
## dy2.t 8.419 46.554
Contemporaneous impact: a positive one-standard-deviation technology shock raises Real Output Per Hour of All Persons by 0.7131% and lowers Hours of All Persons by 0.2457%. A one-standard-deviation non-technology shock raises Real Output Per Hour of All Persons by 0.4075% and Hours of All Persons by 0.6365%. The response to the technology shock was expected and is easy to interpret; the response to the non-technology shock is somewhat surprising, since productivity rises even though no technological change has occurred.
Long-run impact: the long-run cumulative effect of the non-technology shock on Real Output Per Hour of All Persons is 0 (this is the long-run restriction we imposed). A positive one-standard-deviation technology shock raises Real Output Per Hour of All Persons by 0.7833% in the long run and lowers Hours of All Persons by 0.1809%. These results are consistent with theory.
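The identification can be checked by hand: the long-run impact matrix is the cumulative effect of the reduced-form VAR times the contemporaneous impact matrix, with its (1,2) element forced to zero. A minimal sketch, assuming the svarest object returned by BQ() stores the contemporaneous impact matrix in $B:
# long-run impact matrix: (I - A1 - A2)^(-1) %*% B, with Ai the reduced-form coefficient matrices
A.sum <- Reduce("+", Acoef(myVAR))            # A1 + A2 from the reduced-form VAR
LR    <- solve(diag(2) - A.sum) %*% mySVAR$B  # cumulative (long-run) responses
round(LR, 4)                                  # should reproduce the matrix printed above
LR[1, 2]                                      # the restricted element, zero up to rounding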
myIRF.c <- irf(mySVAR, n.ahead=12, ci=.9, cumulative=TRUE)
myIRF <- irf(mySVAR, n.ahead=12, ci=.9)
# the responses are plotted with the signs returned by BQ(); no sign normalisation is applied
par(mfrow=c(2,2), cex=.6, mar=c(4,4,2,1))
source("plotIRF.R")
plotIRF(myIRF.c, lwd=2.5, ask=FALSE, vnames="dy1.t", vlabels="Real Output Per Hour", slabels=c("technology shock","non-technology shock"))
plotIRF(myIRF.c, lwd=2.5, ask=FALSE, vnames="dy2.t", vlabels="Hours of All", slabels=c("technology shock","non-technology shock"))
A positive technology shock has a long-run positive effect on labor productivity and a negative effect on hours worked, and notably this effect does not dampen over time: productivity jumps up immediately, dips slightly, then returns to the initially higher level and stays there, and hours show the same pattern, stabilizing at their initially changed level. A non-technology shock raises productivity immediately, but the effect dies out and productivity is back at its pre-shock level after roughly the 5th period. Hours, in contrast, do not return to their pre-shock level: they rise immediately and then keep increasing gradually until they settle at a permanently higher level.
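These patterns can be read off the cumulative IRF object directly; a short sketch, assuming irf() stores one response matrix per impulse (named after the shock's variable) in $irf:
# cumulative responses at selected horizons (rows 1, 4, 8, 12 = horizons 0, 3, 7, 11)
round(myIRF.c$irf[["dy1.t"]][c(1, 4, 8, 12), ], 3)  # responses to the technology shock
round(myIRF.c$irf[["dy2.t"]][c(1, 4, 8, 12), ], 3)  # responses to the non-technology shock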
Our IRFs look similar to those in Galí (1999), with some differences. The response of productivity to a technology shock is essentially the same, although in Galí the non-technology shock pushes productivity below its pre-shock level before it eventually returns to it, as in our case. Otherwise both the technology and non-technology responses match our model, with only minor discrepancies.
par(mar=c(4,5,2,1))
plot(fevd(mySVAR, n.ahead=40), addbars=3)
mySVAR.fevd <- fevd(mySVAR, n.ahead=10)
mySVAR.fevd[[1]][c(1,3,6,10),]
## dy1.t dy2.t
## [1,] 0.7538244 0.2461756
## [2,] 0.7437308 0.2562692
## [3,] 0.7180470 0.2819530
## [4,] 0.7173526 0.2826474
mySVAR.fevd[[2]][c(1,3,6,10),]
## dy1.t dy2.t
## [1,] 0.12966358 0.8703364
## [2,] 0.08723805 0.9127619
## [3,] 0.08738878 0.9126112
## [4,] 0.08757486 0.9124251
As the FEVDs show, the short-run and long-run decompositions are quite similar for both variables. For productivity, about 75% of the forecast-error variance is attributed to the technology shock and about 25% to the non-technology shock at the one-quarter horizon; in the long run this shifts to roughly 72% and 28%. For hours, about 87% of the forecast-error variance is due to the non-technology shock and 13% to the technology shock on impact; at longer horizons (from the third quarter shown onward) the split is roughly 91% and 9% and stays there.
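For reporting, these shares can be pulled out of the fevd object in percent (a sketch; fevd() returns one matrix per variable, named accordingly):
# forecast-error variance shares at the 10-quarter horizon, in percent
round(100 * mySVAR.fevd$dy1.t[10, ], 1)  # productivity: technology vs non-technology shock
round(100 * mySVAR.fevd$dy2.t[10, ], 1)  # hours: technology vs non-technology shock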