Libraries

library(quantmod)
library(tidyverse)
library(dplyr)
library(fpp2)
library(kableExtra)

Get the unemployment data from FRED and chart it

getSymbols("UNRATENSA", src = "FRED")
## [1] "UNRATENSA"
UNRATENSA %>% head()
##            UNRATENSA
## 1948-01-01       4.0
## 1948-02-01       4.7
## 1948-03-01       4.5
## 1948-04-01       4.0
## 1948-05-01       3.4
## 1948-06-01       3.9
autoplot(UNRATENSA)
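
Note that getSymbols returns an xts object, which the forecast functions below coerce internally; this appears to be why the later plots use an integer observation index rather than calendar dates on the x-axis. If you prefer a date-aware series, here is a minimal sketch of an explicit conversion (unrate.ts is a name introduced here; the rest of this post keeps the xts):

# A sketch: convert the xts series to a monthly ts with frequency = 12
# (not used below; shown only as an alternative starting point)
unrate.ts <- ts(as.numeric(UNRATENSA), start = c(1948, 1), frequency = 12)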

Set the training and test sets. We hold out 2018-2019 for testing and throw out 2020 entirely, because the pandemic shock would probably need an xreg (exogenous regressor) to model.

unrate.train <- window(UNRATENSA, end = as.Date("2017-12-01"))
unrate.test <- window(UNRATENSA, start = as.Date("2018-01-01"), end = as.Date("2019-12-01"))
autoplot(unrate.test)
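
As a quick sanity check on the split, one can inspect the window boundaries (a sketch using only base functions on the objects above):

# The train set should end Dec 2017; the test set should span
# Jan 2018 through Dec 2019, i.e. 24 monthly observations
c(start(unrate.train), end(unrate.train))
c(start(unrate.test), end(unrate.test))
nrow(unrate.test)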

Fit the neural nets. The data are monthly, so the seasonal period is 12; we leave the lag structure alone and instead vary size, the number of nodes in the hidden layer.

# Size = 1
fit1 <- nnetar(unrate.train, size = 1, repeats = 100, lambda = "auto")
fcast1 <- forecast(fit1, h = 24)

# Size = 2
fit2 <- nnetar(unrate.train, size = 2, repeats = 100, lambda = "auto")
fcast2 <- forecast(fit2, h = 24)

# Size = 3
fit3 <- nnetar(unrate.train, size = 3, repeats = 100, lambda = "auto")
fcast3 <- forecast(fit3, h = 24)

# Size = 4
fit4 <- nnetar(unrate.train, size = 4, repeats = 100, lambda = "auto")
fcast4 <- forecast(fit4, h = 24)

# Size = 5
fit5 <- nnetar(unrate.train, size = 5, repeats = 100, lambda = "auto")
fcast5 <- forecast(fit5, h = 24)
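
The five fits above could equally be produced in a loop; a compact sketch with the same arguments (fits and fcasts are names introduced here):

# Same models as fit1 through fit5, collected in lists
fits <- lapply(1:5, function(s) nnetar(unrate.train, size = s, repeats = 100, lambda = "auto"))
fcasts <- lapply(fits, forecast, h = 24)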



# The x-axis is the observation index (Jan 1948 = observation 1), so
# xlim = c(775, 875) zooms to roughly mid-2012 through the forecast horizon
autoplot(fcast1) +
  autolayer(fcast2, series = "size=2") +
  autolayer(fcast3, series = "size=3") +
  autolayer(fcast4, series = "size=4") +
  autolayer(fcast5, series = "size=5") +
  coord_cartesian(xlim = c(775, 875))

Accuracy

kable(accuracy(fcast1, unrate.test), caption = "NN(25,1)")

NN(25,1)
                      ME      RMSE       MAE        MPE      MAPE      MASE      ACF1
Training set  -0.0023939 0.2418903 0.1854816  -0.345001  3.460493 0.5302182 0.0492369
Test set      -0.6885785 0.7862486 0.6933565 -19.093463 19.200652 1.9820309        NA

kable(accuracy(fcast2, unrate.test), caption = "NN(25,2)")

NN(25,2)
                      ME      RMSE       MAE         MPE      MAPE      MASE      ACF1
Training set  -0.0017432 0.2326425 0.1780830  -0.2729393  3.314391 0.5090687 0.0249543
Test set      -0.5463865 0.6483575 0.5554113 -15.2576176 15.460753 1.5877005        NA

kable(accuracy(fcast3, unrate.test), caption = "NN(25,3)")

NN(25,3)
                      ME      RMSE       MAE        MPE      MAPE      MASE      ACF1
Training set  -0.0013529 0.2208340 0.1695003  -0.248496  3.152621 0.4845342 0.0212717
Test set      -0.3650587 0.4595191 0.3813886 -10.308305 10.680362 1.0902387        NA

kable(accuracy(fcast4, unrate.test), caption = "NN(25,4)")

NN(25,4)
                      ME      RMSE       MAE        MPE     MAPE      MASE      ACF1
Training set  -0.0011336 0.2080790 0.1598523 -0.2175744 2.968462 0.4569543 0.0023539
Test set      -0.1950826 0.2670129 0.2193771 -5.5975131 6.157523 0.6271119        NA

kable(accuracy(fcast5, unrate.test), caption = "NN(25,5)")

NN(25,5)
                      ME      RMSE       MAE        MPE     MAPE      MASE       ACF1
Training set  -0.0008315 0.1929220 0.1488886 -0.1952425 2.777632 0.4256135 -0.012067
Test set       0.0424574 0.1265611 0.1006013  0.9058477 2.600926 0.2875792        NA
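
Rather than scanning five tables, the test-set RMSEs can be pulled into a single comparison; a minimal sketch (rmse_by_size is a name introduced here):

# Extract the test-set RMSE from each accuracy matrix
rmse_by_size <- sapply(list(fcast1, fcast2, fcast3, fcast4, fcast5),
                       function(f) accuracy(f, unrate.test)["Test set", "RMSE"])
kable(data.frame(size = 1:5, test_RMSE = rmse_by_size),
      caption = "Test-set RMSE by hidden-layer size")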

The neural net with five hidden nodes (size = 5) performed best, with the lowest test-set RMSE. Its test RMSE was even lower than its training RMSE, though that is likely a coincidence of this particular holdout window.