Sabol ECM three-state model

This is the code to produce forecasts of the labor market using the ECM method from Sabol (2014).

First we must load the required packages:

library(zoo)
## 
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
## 
##     as.Date, as.Date.numeric
library(vars)
## Loading required package: MASS
## Loading required package: strucchange
## Loading required package: sandwich
## Loading required package: urca
## Loading required package: lmtest
library(forecast)
## Loading required package: timeDate
## This is forecast 7.1
library(tsDyn)

Next we read in the data and define the number of steps ahead we wish to forecast and the length of the rolling window of data we will use:

lflow<- read.csv("F:/R codes/Labor Market Forecasting- R Models/flowR.csv")
datam<-as.matrix(lflow[,-(5:10)])   # matrix version of the data with columns 5-10 dropped
n<-length(datam[,1])                # number of observations
steps<-36                           # forecast horizon in months
varwind<-260                        # length of the rolling estimation window
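
As a quick sanity check (an optional step, not part of the original pipeline), one might confirm that the rolling window fits inside the sample before proceeding:

  stopifnot(varwind < n)   # the window indexed as (n-varwind):n must lie within the data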

After reading in the data we then create the smoothed state-vector values (this step is left commented out below):

# eur1<-StructTS(lflow[1:n,5])
# EUR<-tsSmooth(eur1)[,1]+tsSmooth(eur1)[,2]
# en1<-StructTS(lflow[1:n,6])
# ENR<-tsSmooth(en1)[,1]+tsSmooth(en1)[,2]
# uer1<-StructTS(lflow[1:n,7])
# UER<-tsSmooth(uer1)[,1]+tsSmooth(uer1)[,2]
# unr1<-StructTS(lflow[1:n,8])
# UNR<-tsSmooth(unr1)[,1]+tsSmooth(unr1)[,2]
# ner1<-StructTS(lflow[1:n,9])
# NER<-tsSmooth(ner1)[,1]+tsSmooth(ner1)[,2]
# nur1<-StructTS(lflow[1:n,10])
# NUR<-tsSmooth(nur1)[,1]+tsSmooth(nur1)[,2]
# hwi1<-StructTS(lflow[1:n,11])
# HWI<-tsSmooth(hwi1)[,1]+tsSmooth(hwi1)[,2]
# uic1<-StructTS(lflow[1:n,12])
# UIC<-tsSmooth(uic1)[,1]+tsSmooth(uic1)[,2]
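
If the smoothing step is re-enabled, the repeated StructTS/tsSmooth pattern above can be wrapped in a small helper. This is a minimal sketch, assuming each column is a plain numeric vector and that a local-trend model (level plus slope, as in the commented lines) is what is intended:

  # helper: smooth a single series with a structural local-trend model
  smooth_rate <- function(x) {
    fit <- StructTS(x, type = "trend")   # local level + slope model
    sm  <- tsSmooth(fit)                 # smoothed state vector
    sm[, 1] + sm[, 2]                    # level plus slope, as in the commented code
  }
  # e.g. EUR <- smooth_rate(lflow[1:n, 5])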

After this we create the data set used for the ECM estimation. The code below uses the rates in levels; a log version could be used instead (see the sketch after the next block):

attach(lflow)
labordata<-data.frame(UE,NE,EU,NU,EN,UN,URATE,EPOP,LFPR)
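
If a log specification is preferred, a minimal sketch (assuming all of the rates are strictly positive) would be:

  loglabordata <- log(labordata)   # hypothetical log version of the estimation data set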

Next we perform the ECM estimation: select the lag length with VARselect, determine the cointegration rank with the Johansen eigenvalue test, and fit the VECM, from which we forecast the six flow rates.

  # Lag selection by AIC over the rolling window
  sel    <- VARselect(labordata[(n-varwind):n,], lag.max=3, type="const")
  maxlag <- as.vector(sel$selection[1])

  # Johansen eigenvalue test; cval[19:27] picks the third (1%) column of the 9 x 3 critical-value matrix
  ecmini <- ca.jo(labordata, type=c("eigen"), ecdet=c("const"), K=maxlag, spec=c("longrun"))

  # Cointegration rank: number of rejections at the 1% level minus one, with a floor of one
  coi <- if(sum(ecmini@teststat > ecmini@cval[19:27]) - 1 > 0){
    sum(ecmini@teststat > ecmini@cval[19:27]) - 1
  }else{
    1
  }

  # Estimate the VECM and forecast the six flow rates 'steps' months ahead
  modelecm <- VECM(labordata, lag=maxlag, r=coi, include=c("const"), estim=c("ML"))
  ecmfcst  <- predict(modelecm, n.ahead=steps)   # forecast matrix, computed once
      feure<-ecmfcst[1:steps,"EU"]
      fenre<-ecmfcst[1:steps,"EN"]
      fuere<-ecmfcst[1:steps,"UE"]
      fnere<-ecmfcst[1:steps,"NE"]
      fnure<-ecmfcst[1:steps,"NU"]
      funre<-ecmfcst[1:steps,"UN"]
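
To double-check the rank choice, one can print the Johansen test summary, which reports the same test statistics and critical values that the indexing above compares:

  summary(ecmini)   # eigenvalue test statistics with 10%, 5% and 1% critical values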

This leads to our forecasts of s, f and g, which will be used to forecast the steady-state values of E, U and N:

  s1 = fenre*fnure + fnere*feure + fnure*feure
  f1 = funre*fnere + fnure*fuere + fnere*fuere
  g1 = feure*funre + fuere*fenre + funre*fenre
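
For reference, these composites enter the three-state steady-state identities in the same way the historical windows do further below. A sketch of the mapping, using the document's own variable names, where P is the population (N_ss is not computed explicitly in the code but follows the same pattern):

  #   U_ss = P * s1 / (s1 + f1 + g1)
  #   E_ss = P * f1 / (s1 + f1 + g1)
  #   N_ss = P * g1 / (s1 + f1 + g1)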

Leaving the above result to the side for a minute, we take varwind-length windows of the true past values of P, N, E and U:

  Nwind<-N[(n-varwind):n] 
  Pwind<-POP[(n-varwind):n]
  Ewind<-E[(n-varwind):n]
  Uwind<-U[(n-varwind):n]

Next we forecast the population, P, and the number out of the labor force, N, which together are in essence our labor force participation forecast:

  PP<-forecast(ets(Pwind),h=steps)$mean
  fN<-forecast(ets(Nwind),h=steps)$mean
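
Both series are forecast with automatically selected exponential smoothing (ETS) models. As an optional check (not part of the original pipeline), one can inspect which specification was chosen for, say, the population series:

  fit_pop <- ets(Pwind)   # hypothetical name for the fitted ETS model
  fit_pop$method          # label of the selected model, e.g. of the form "ETS(A,A,N)"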

Next we build forecasts of the convergence gaps.

First we form varwind-length windows of the historical flow-rate composites s, f and g:

  gwind<-EU[(n-varwind):n]*UN[(n-varwind):n] + UE[(n-varwind):n]*EN[(n-varwind):n] + UN[(n-varwind):n]*EN[(n-varwind):n]
  fwind<-UN[(n-varwind):n]*NE[(n-varwind):n] + NU[(n-varwind):n]*UE[(n-varwind):n] + NE[(n-varwind):n]*UE[(n-varwind):n]
  swind<-EN[(n-varwind):n]*NU[(n-varwind):n] + NE[(n-varwind):n]*EU[(n-varwind):n] + NU[(n-varwind):n]*EU[(n-varwind):n]

Then we create the convergence-gap time series, i.e. the true values minus their steady-state counterparts, which will be used to forecast, for example, true U minus steady-state U:

  ConvN<-Nwind-(Pwind/(gwind+fwind+swind))*gwind
  ConvU<-Uwind-(Pwind/(gwind+fwind+swind))*swind
  ConvE<-Ewind-(Pwind/(gwind+fwind+swind))*fwind 

We forecast the convergence gaps with automatically selected ARIMA models:

  fcn<-forecast(auto.arima(ConvN),h=steps)$mean
  fcu<-forecast(auto.arima(ConvU),h=steps)$mean
  fce<-forecast(auto.arima(ConvE),h=steps)$mean

The last step involves combining the forecasts with our identities.

Putting it all together, we compute the steady-state values of U and E. We may choose one of two ways to forecast the steady states: the active lines below use the three-state identity that allocates the whole population across E, U and N, while the commented alternative uses the two-state identity on the labor force (PP - fN).

  U_ss=(PP/(s1+f1+g1))*s1
  E_ss=(PP/(s1+f1+g1))*f1
#   U_ss=((PP-fN)/(s1+f1))*s1
#   E_ss=((PP-fN)/(s1+f1))*f1

We combine U_ss and E_ss with the convergence-gap forecasts we made:

  fE<-E_ss+fce
  fU<-U_ss+fcu

Our forecasts for the unemployment rate and labor force participation rate are

  urfcast<-(fU/(fE+fU))*100
  lfcast<-((PP-fN)/PP)*100
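
As a quick arithmetic check against the table printed at the end, the first forecast month gives roughly:

  7507.678/(151507.2+7507.678)*100   # ~ 4.7214, matching the first urfcast value below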

Combine with the original data

  date1<-as.yearmon(1990.1+seq(0,n+steps-1)/12)   # monthly dates spanning the history plus the forecast horizon
  Datel<-as.Date(date1)
  l<-n-76                                         # start of the history shown in the plots
  fDate<-Datel[(n+1):(n+steps)]                   # forecast-period dates

        fcastE<-zoo(c(E[l:n],fE),Datel[l:(n+steps)])
        fcastU<-zoo(c(U[l:n],fU),Datel[l:(n+steps)])
        fcastN<-zoo(c(N[l:n],fN),Datel[l:(n+steps)])
        fcastlfpr<-zoo(c(LFPR[l:n]*100,lfcast),Datel[l:(n+steps)])
        fcastur<-zoo(c(URATE[l:n]*100,urfcast),Datel[l:(n+steps)])

Make a plot of the forecasts:

  par(mfrow=c(2,2))
  par(tck=c(0.055),pty=c("m"))
      plot(fcastN,col="red",type="l",xlab="OLF Forecast",ylab="Thousands",main=Datel[n+1])
      plot(fcastE,type="l",xlab="Employment Forecast",ylab="Thousands",main="ECM Forecast",col="red",col.sub="green",font.sub=3,cex.main=.9)
      plot(fcastlfpr,ylab="Rate",type="l",lty="dashed",xlab="LFPR Forecast",col="red",font.sub=3) 
      plot(fcastur,type="l",ylab="Rate",xlab="Unemployment Rate Forecast",col="red")

finaldata <- data.frame(Date=fDate,fE,fU,lfcast,urfcast)
print(finaldata)
##          Date       fE       fU   lfcast  urfcast
## 1  2016-06-01 151507.2 7507.678 62.63458 4.721367
## 2  2016-07-01 151833.6 7459.700 62.61304 4.682997
## 3  2016-08-01 152456.0 7442.555 62.59153 4.654547
## 4  2016-09-01 152052.3 7457.918 62.57006 4.675511
## 5  2016-10-01 152023.5 7442.577 62.54863 4.667185
## 6  2016-11-01 152157.0 7405.238 62.52723 4.640972
## 7  2016-12-01 152635.3 7343.578 62.50587 4.590343
## 8  2017-01-01 152678.5 7352.720 62.48455 4.594555
## 9  2017-02-01 152657.0 7349.962 62.46326 4.593525
## 10 2017-03-01 152876.1 7323.050 62.44200 4.571217
## 11 2017-04-01 152964.8 7311.800 62.42079 4.561987
## 12 2017-05-01 153105.9 7303.672 62.39960 4.553140
## 13 2017-06-01 153195.1 7294.017 62.37846 4.544867
## 14 2017-07-01 153332.9 7283.014 62.35735 4.534429
## 15 2017-08-01 153445.7 7274.910 62.33627 4.526432
## 16 2017-09-01 153549.4 7268.491 62.31523 4.519702
## 17 2017-10-01 153671.1 7260.885 62.29423 4.511774
## 18 2017-11-01 153784.9 7254.271 62.27326 4.504664
## 19 2017-12-01 153898.5 7248.527 62.25232 4.498083
## 20 2018-01-01 154008.5 7243.179 62.23142 4.491847
## 21 2018-02-01 154122.0 7237.881 62.21056 4.485551
## 22 2018-03-01 154234.6 7233.050 62.18973 4.479566
## 23 2018-04-01 154345.3 7228.635 62.16893 4.473887
## 24 2018-05-01 154456.7 7224.383 62.14817 4.468292
## 25 2018-06-01 154567.5 7220.358 62.12744 4.462855
## 26 2018-07-01 154678.0 7216.584 62.10675 4.457583
## 27 2018-08-01 154788.2 7212.984 62.08609 4.452428
## 28 2018-09-01 154898.1 7209.544 62.06547 4.447381
## 29 2018-10-01 155007.8 7206.255 62.04488 4.442435
## 30 2018-11-01 155117.2 7203.105 62.02432 4.437587
## 31 2018-12-01 155226.4 7200.070 62.00380 4.432819
## 32 2019-01-01 155335.4 7197.140 61.98331 4.428124
## 33 2019-02-01 155444.1 7194.305 61.96286 4.423497
## 34 2019-03-01 155552.7 7191.557 61.94244 4.418932
## 35 2019-04-01 155661.1 7188.884 61.92205 4.414422
## 36 2019-05-01 155769.3 7186.278 61.90170 4.409962

write.csv(file="3stateforecasts.csv",x=finaldata)