Import data

The monthly Retail and Food Services Sales series (FRED code RSAFSNA) is retrieved through Quandl.

library(Quandl)
## Loading required package: xts
## Loading required package: zoo
## 
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
## 
##     as.Date, as.Date.numeric
library(xts)
rfss <- Quandl("FRED/RSAFSNA", type="ts")   # monthly Retail and Food Services Sales as a ts object
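If a Quandl API key is not available, the same FRED series can also be pulled with the quantmod package. A minimal sketch (assuming quantmod is installed; getSymbols creates an xts object named RSAFSNA in the workspace):

library(quantmod)
getSymbols("RSAFSNA", src = "FRED")            # download the series from FRED as an xts object
first.date <- start(RSAFSNA)                   # date of the first observation
rfss <- ts(as.numeric(RSAFSNA),
           start = c(as.numeric(format(first.date, "%Y")),
                     as.numeric(format(first.date, "%m"))),
           frequency = 12)                     # convert to a monthly ts object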

y <- window(rfss, end=c(2008,6))        # estimation sample: through June 2008

ly <- log(y)                            # log level
dly1 <- diff(ly)                        # first (monthly) difference of log(y)
dly12 <- diff(ly, lag=12)               # seasonal (12-month) difference of log(y)
d2ly1_12 <- diff(diff(ly), lag=12)      # first and seasonal differences combined
par(mfrow=c(2,3))
plot(rfss, main=expression(rfss))
plot(ly, main=expression(log(y)))
plot.new()
plot(dly1, main=expression(paste(Delta, "log(y)")))
plot(dly12, main=expression(paste(Delta[12], "log(y)")))
plot(d2ly1_12, main=expression(paste(Delta, Delta[12], "log(y)")))

library(forecast)
## Loading required package: timeDate
## This is forecast 7.3
maxlag <- 20
par(mfrow=c(2,4), mar=c(3,3,4,2))

Acf(ly, type='correlation', lag.max=maxlag, na.action=na.omit, ylab="", main=expression(paste("ACF for log(y)")))
Acf(dly1, type='correlation', lag.max=maxlag, na.action=na.omit, ylab="", main=expression(paste("ACF for ", Delta, " log(y)")))
Acf(dly12, type='correlation', lag.max=maxlag, na.action=na.omit, ylab="", main=expression(paste("ACF for ", Delta[12], " log(y)")))
Acf(d2ly1_12, type='correlation', lag.max=maxlag, na.action=na.omit, ylab="", main=expression(paste("ACF for ", Delta, Delta[12], " log(y)")))

Acf(ly, type='partial', lag.max=maxlag, na.action=na.omit, ylab="", main=expression(paste("PACF for log(y)")))
Acf(dly1, type='partial', lag.max=maxlag, na.action=na.omit, ylab="", main=expression(paste("PACF for ", Delta, " log(y)")))
Acf(dly12, type='partial', lag.max=maxlag, na.action=na.omit, ylab="", main=expression(paste("PACF for ", Delta[12], " log(y)")))
Acf(d2ly1_12, type='partial', lag.max=maxlag, na.action=na.omit, ylab="", main=expression(paste("PACF for ", Delta, Delta[12], " log(y)")))
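The visual evidence from the plots and correlograms can be cross-checked against the forecast package's automatic differencing suggestions. A small sketch (the unit-root based counts are only heuristics, not a substitute for the plots above):

ndiffs(ly)     # suggested number of regular differences for log(y)
nsdiffs(ly)    # suggested number of seasonal differences for log(y)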

m1 <- Arima(dly1, order = c(1,0,0), seasonal = list(order = c(1,0,0), period = 12))

m2 <- Arima(dly1, order = c(1,1,0), seasonal = list(order = c(2,0,0), period = 12))

m3 <- Arima(dly1, order = c(2,0,0), seasonal = list(order = c(1,0,0), period = 12))

m4 <- Arima(dly1, order = c(2,1,0), seasonal = list(order = c(1,0,0), period = 12))

m5 <- Arima(dly1, order = c(2,0,1), seasonal = list(order = c(2,0,0), period = 12))

m6 <- Arima(dly1, order = c(3,0,0), seasonal = list(order = c(1,0,0), period = 12))

m7 <- Arima(dly1, order = c(3,1,0), seasonal = list(order = c(1,0,0), period = 12))

m8 <- Arima(dly1, order = c(6,0,1), seasonal = list(order = c(1,0,0), period = 12))

m9 <- Arima(dly1, order = c(6,1,0), seasonal = list(order = c(2,0,0), period = 12))

m10 <- Arima(dly1, order = c(9,1,0), seasonal = list(order = c(1,0,0), period = 12))

m11 <- Arima(dly1, order = c(9,1,0), seasonal = list(order = c(2,0,0), period = 12))
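As a cross-check on this hand-picked candidate set, forecast::auto.arima can run an automated order search on the same series. A sketch (stepwise=FALSE makes the search slower but more thorough; its choice need not coincide with any of m1 through m11):

m.auto <- auto.arima(dly1, seasonal = TRUE,
                     stepwise = FALSE, approximation = FALSE)
summary(m.auto)   # compare the selected orders and AICc with the models above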
m1
## Series: dly1 
## ARIMA(1,0,0)(1,0,0)[12] with non-zero mean 
## 
## Coefficients:
##           ar1    sar1  intercept
##       -0.4746  0.9687     0.0026
## s.e.   0.0627  0.0107     0.0163
## 
## sigma^2 estimated as 0.0005405:  log likelihood=446.15
## AIC=-884.3   AICc=-884.09   BIC=-871.17
m2
## Series: dly1 
## ARIMA(1,1,0)(2,0,0)[12]                    
## 
## Coefficients:
##           ar1    sar1    sar2
##       -0.6253  0.8858  0.0841
## s.e.   0.0557  0.0748  0.0767
## 
## sigma^2 estimated as 0.001253:  log likelihood=361.46
## AIC=-714.92   AICc=-714.71   BIC=-701.81
m3
## Series: dly1 
## ARIMA(2,0,0)(1,0,0)[12] with non-zero mean 
## 
## Coefficients:
##           ar1      ar2    sar1  intercept
##       -0.6777  -0.4288  0.9706     0.0028
## s.e.   0.0645   0.0642  0.0102     0.0107
## 
## sigma^2 estimated as 0.0004412:  log likelihood=466.07
## AIC=-922.14   AICc=-921.82   BIC=-905.72
m4
## Series: dly1 
## ARIMA(2,1,0)(1,0,0)[12]                    
## 
## Coefficients:
##           ar1      ar2    sar1
##       -1.0214  -0.6273  0.9755
## s.e.   0.0564   0.0560  0.0088
## 
## sigma^2 estimated as 0.0007566:  log likelihood=408.84
## AIC=-809.68   AICc=-809.47   BIC=-796.57
m5
## Series: dly1 
## ARIMA(2,0,1)(2,0,0)[12] with non-zero mean 
## 
## Coefficients:
##           ar1      ar2      ma1    sar1    sar2  intercept
##       -0.3899  -0.2998  -0.4412  0.7088  0.2667     0.0033
## s.e.   0.2078   0.1430   0.2218  0.0810  0.0840     0.0071
## 
## sigma^2 estimated as 0.0004048:  log likelihood=475.32
## AIC=-936.64   AICc=-936.05   BIC=-913.66
m6
## Series: dly1 
## ARIMA(3,0,0)(1,0,0)[12] with non-zero mean 
## 
## Coefficients:
##           ar1      ar2      ar3    sar1  intercept
##       -0.7098  -0.4814  -0.0768  0.9692     0.0029
## s.e.   0.0710   0.0809   0.0721  0.0107     0.0096
## 
## sigma^2 estimated as 0.0004422:  log likelihood=466.63
## AIC=-921.27   AICc=-920.83   BIC=-901.57
m7
## Series: dly1 
## ARIMA(3,1,0)(1,0,0)[12]                    
## 
## Coefficients:
##           ar1      ar2      ar3    sar1
##       -1.2107  -0.9395  -0.3130  0.9751
## s.e.   0.0680   0.0870   0.0692  0.0090
## 
## sigma^2 estimated as 0.0006876:  log likelihood=418.55
## AIC=-827.1   AICc=-826.79   BIC=-810.71
m8
## Series: dly1 
## ARIMA(6,0,1)(1,0,0)[12] with non-zero mean 
## 
## Coefficients:
##           ar1      ar2      ar3      ar4      ar5     ar6      ma1    sar1
##       -0.2909  -0.2716  -0.0416  -0.1979  -0.0191  0.0581  -0.4788  0.9662
## s.e.   0.5972   0.4621   0.3833   0.2178   0.2241  0.1292   0.5918  0.0116
##       intercept
##          0.0033
## s.e.     0.0059
## 
## sigma^2 estimated as 0.0004197:  log likelihood=474.18
## AIC=-928.35   AICc=-927.17   BIC=-895.52
m9
## Series: dly1 
## ARIMA(6,1,0)(2,0,0)[12]                    
## 
## Coefficients:
##           ar1      ar2      ar3      ar4      ar5      ar6    sar1    sar2
##       -1.5512  -1.7380  -1.4222  -1.1746  -0.7348  -0.2394  0.6122  0.3711
## s.e.   0.0700   0.1214   0.1534   0.1533   0.1208   0.0695  0.0715  0.0725
## 
## sigma^2 estimated as 0.0004582:  log likelihood=457.99
## AIC=-897.99   AICc=-897.02   BIC=-868.48
m10
## Series: dly1 
## ARIMA(9,1,0)(1,0,0)[12]                    
## 
## Coefficients:
##           ar1      ar2      ar3      ar4      ar5      ar6      ar7
##       -1.6051  -1.9386  -1.9608  -1.9519  -1.6934  -1.3168  -0.9821
## s.e.   0.0706   0.1271   0.1766   0.2062   0.2175   0.2059   0.1773
##           ar8      ar9    sar1
##       -0.6367  -0.1426  0.9721
## s.e.   0.1289   0.0711  0.0101
## 
## sigma^2 estimated as 0.0004297:  log likelihood=466.64
## AIC=-911.28   AICc=-909.84   BIC=-875.22
m11
## Series: dly1 
## ARIMA(9,1,0)(2,0,0)[12]                    
## 
## Coefficients:
##           ar1      ar2      ar3      ar4     ar5      ar6      ar7
##       -1.6371  -2.0077  -1.9867  -1.9972  -1.710  -1.3336  -0.9819
## s.e.   0.0708   0.1304   0.1839   0.2143   0.227   0.2139   0.1915
##           ar8      ar9    sar1    sar2
##       -0.6450  -0.1156  0.5703  0.4140
## s.e.   0.1432   0.0768  0.0773  0.0758
## 
## sigma^2 estimated as 0.0003634:  log likelihood=480.98
## AIC=-937.96   AICc=-936.25   BIC=-898.62
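Before looking at the residual diagnostics, the information criteria printed above can be collected into one table for easier comparison. A sketch (assuming all eleven fits succeeded):

models <- list(m1=m1, m2=m2, m3=m3, m4=m4, m5=m5, m6=m6,
               m7=m7, m8=m8, m9=m9, m10=m10, m11=m11)
ic <- data.frame(AIC  = sapply(models, AIC),
                 AICc = sapply(models, function(m) m$aicc),
                 BIC  = sapply(models, BIC))
round(ic[order(ic$AIC), ], 2)   # m11 has the lowest AIC; m5 has the lowest BIC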
par(mar=c(1,1,1,1))
tsdiag(m1, gof.lag=36)

tsdiag(m2, gof.lag=36)

tsdiag(m3, gof.lag=36)

tsdiag(m4, gof.lag=36)

tsdiag(m5, gof.lag=36)

tsdiag(m6, gof.lag=36)

tsdiag(m7, gof.lag=36)

tsdiag(m8, gof.lag=36)

tsdiag(m9, gof.lag=36)

tsdiag(m10, gof.lag=36)

tsdiag(m11, gof.lag=36)
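The tsdiag panels can be supplemented with an explicit Ljung-Box test on the residuals of the preferred model. A sketch (fitdf is set to the 11 AR and seasonal AR coefficients estimated in m11):

Box.test(residuals(m11), lag = 36, type = "Ljung-Box", fitdf = 11)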

Among the eleven candidates, ARIMA(9,1,0)(2,0,0)[12] (m11) has the lowest AIC, so I will use it for the forecasting.

y <- rfss
m <- m11
m.f <- forecast(m, h=48)

# m was fit to dly1 (log differences), so forecast errors are computed against
# the actual log differences over the forecast window, not against ly, which
# stops in June 2008 and is in log levels.
dly.actual <- diff(log(y))
m.f.err <- window(dly.actual, start = c(2008, 7), end = c(2012, 6)) - m.f$mean
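With the hold-out log differences in hand, forecast::accuracy summarizes out-of-sample fit in one call. A sketch using the dly.actual series defined above:

dly.test <- window(dly.actual, start = c(2008, 7), end = c(2012, 6))   # same 48-month window as m.f
accuracy(m.f, dly.test)   # training-set and test-set error measures for m11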

Forecast

ye <- window(y, end = c(2008, 6))    # estimation sample (same split as above)
yp <- window(y, start = c(2008, 7))  # prediction (hold-out) sample

Two Parts of the Sample

fst <- c(1992, 1)  # first observation: 1992-01
lst <- c(2017, 2)  # last observation: 2017-02

Multistep Forecast

multiple <- forecast(m11, h = length(yp))
plot(multiple, type="o", pch=16, xlim=c(2008,2014), ylim=c(-0.03,0.03),
     main="Multistep Forecasts: Retail and Food Services Sales")
lines(multiple$mean, type="p", pch=16, lty="dashed", col="blue")
lines(diff(log(y)), type="o", pch=16, lty="dashed")   # actual log differences (not ly, which is in log levels)

m9.fcast <- forecast(m9, h=25)
plot(m9.fcast, type="o", pch=16, xlim=c(2008,2014), ylim=c(-0.03,0.03),
     main="Forecasts: Retail and Food Services Sales (ARIMA(6,1,0)(2,0,0)[12])")
lines(multiple$mean, type="l", pch=16, lty="dashed", col="blue")   # m11 forecasts for comparison
lines(diff(log(y)), type="l", pch=16, lty="dashed")                # actual log differences
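To put numbers on the visual comparison of the two models, their out-of-sample accuracy can be computed against the same hold-out series. A sketch reusing the dly.test series defined earlier:

rbind(m9  = accuracy(forecast(m9,  h = length(dly.test)), dly.test)["Test set", ],
      m11 = accuracy(forecast(m11, h = length(dly.test)), dly.test)["Test set", ])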

Conclusion

The forecasts from ARIMA(9,1,0)(2,0,0)[12] track the actual log differences closely over the hold-out period.