#load data
library(readr)
mydata <- read_csv("IOWA Farm Data.csv")
Parsed with column specification:
cols(
  DATE = col_character(),
  A045RC1Q027SBEA = col_double()
)
View(mydata)

#libraries needed
library(forecast)
library(xts)
library(tseries)
library(fpp)
library(fpp2)
#view data
str(mydata)
Classes ‘spec_tbl_df’, ‘tbl_df’, ‘tbl’ and 'data.frame':    129 obs. of  2 variables:
 $ DATE           : chr  "1/1/1947" "4/1/1947" "7/1/1947" "10/1/1947" ...
 $ A045RC1Q027SBEA: num  20.2 20.9 19.8 20 21.9 ...
 - attr(*, "spec")=
  .. cols(
  ..   DATE = col_character(),
  ..   A045RC1Q027SBEA = col_double()
  .. )
head(mydata)
summary(mydata)
     DATE           A045RC1Q027SBEA 
 Length:129         Min.   : 19.80  
 Class :character   1st Qu.: 32.74  
 Mode  :character   Median : 44.57  
                    Mean   : 56.16  
                    3rd Qu.: 67.23  
                    Max.   :156.30  
names(mydata)
[1] "DATE"            "A045RC1Q027SBEA"
#set up time series and plot
myts <- ts(mydata$A045RC1Q027SBEA, frequency = 4, start = c(1947, 1))
plot(myts, main = "Quarterly Iowa Nonfarm Income (1947-1979)", ylab = "Nonfarm Income (Billions)")
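
#optional sanity check (not in the original run): confirm that the ts object
#spans 1947 Q1 to 1979 Q1 at quarterly frequency, matching the plot title
start(myts)
end(myts)
frequency(myts)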

#model 1 - automatic ETS selection ("ZZZ" lets ets() choose the error, trend and seasonal components)
par(mfrow = c(2, 2))  #arrange subsequent plots on a 2x2 grid

model1<-ets(myts,model = "ZZZ")
model1
ETS(M,A,N) 

Call:
 ets(y = myts, model = "ZZZ") 

  Smoothing parameters:
    alpha = 0.9999 
    beta  = 0.1019 

  Initial states:
    l = 19.7697 
    b = 0.432 

  sigma:  0.0193

     AIC     AICc      BIC 
615.4408 615.9286 629.7399 
#ETS(M,A,N) was selected automatically: multiplicative errors, additive trend, no seasonal component

model1.pred <- forecast(model1, h = 8)
plot(model1.pred, ylab = "Nonfarm Income", main = "Model 1 auto-select: ETS(M,A,N)")
model1$par
     alpha       beta          l          b 
 0.9999000  0.1019448 19.7696663  0.4319821 
summary(model1)
ETS(M,A,N) 

Call:
 ets(y = myts, model = "ZZZ") 

  Smoothing parameters:
    alpha = 0.9999 
    beta  = 0.1019 

  Initial states:
    l = 19.7697 
    b = 0.432 

  sigma:  0.0193

     AIC     AICc      BIC 
615.4408 615.9286 629.7399 

Training set error measures:
                    ME    RMSE       MAE       MPE     MAPE
Training set 0.2409512 1.00811 0.6996218 0.2307613 1.350205
                  MASE      ACF1
Training set 0.1655465 0.2917487
hist(residuals(model1))
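
#optional sketch: checkresiduals() from the forecast package combines a time
#plot, ACF and histogram of the residuals with a Ljung-Box test, which
#complements the plain histogram above
checkresiduals(model1)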


#model 2 - ETS(A,N,N): simple exponential smoothing (no trend, no seasonality)
model2<-ets(myts,model = "ANN")
model2
ETS(A,N,N) 

Call:
 ets(y = myts, model = "ANN") 

  Smoothing parameters:
    alpha = 0.9999 

  Initial states:
    l = 20.2415 

  sigma:  1.7852

     AIC     AICc      BIC 
780.4207 780.6127 789.0001 
model2.pred <- forecast(model2, h = 8)
plot(model2.pred, ylab = "Nonfarm Income", main = "Model 2: ETS(A,N,N)")
model2$par
   alpha        l 
 0.99990 20.24154 
summary(model2)
ETS(A,N,N) 

Call:
 ets(y = myts, model = "ANN") 

  Smoothing parameters:
    alpha = 0.9999 

  Initial states:
    l = 20.2415 

  sigma:  1.7852

     AIC     AICc      BIC 
780.4207 780.6127 789.0001 

Training set error measures:
                   ME     RMSE      MAE      MPE     MAPE      MASE
Training set 1.054834 1.771319 1.184007 1.554514 1.939676 0.2801631
                  ACF1
Training set 0.6671985
hist(residuals(model2))
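
#optional sketch: model 2's training-set ACF1 is high (about 0.67), so a
#Ljung-Box test on the residuals can confirm the leftover autocorrelation
Box.test(residuals(model2), lag = 8, type = "Ljung-Box")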



#model 3 - ETS(M,A,M): multiplicative errors and seasonality with an additive trend
#(ets() also tries a damped trend by default, so the fit reported below is M,Ad,M)

Model3 <- ets(myts, model = "MAM")
Model3 
ETS(M,Ad,M) 

Call:
 ets(y = myts, model = "MAM") 

  Smoothing parameters:
    alpha = 0.9626 
    beta  = 0.161 
    gamma = 2e-04 
    phi   = 0.9797 

  Initial states:
    l = 19.7873 
    b = 0.0035 
    s = 0.9964 1 1.0024 1.0012

  sigma:  0.0201

     AIC     AICc      BIC 
630.3840 632.2484 658.9821 
Model3.pred <- forecast(Model3, h = 8)
plot(Model3.pred, ylab = "Nonfarm Income", main = "Model 3: ETS(M,Ad,M)")
Model3$par
       alpha         beta        gamma          phi            l 
9.626321e-01 1.609806e-01 1.508929e-04 9.797130e-01 1.978729e+01 
           b           s0           s1           s2 
3.459506e-03 9.963982e-01 9.999871e-01 1.002411e+00 
summary(Model3)
ETS(M,Ad,M) 

Call:
 ets(y = myts, model = "MAM") 

  Smoothing parameters:
    alpha = 0.9626 
    beta  = 0.161 
    gamma = 2e-04 
    phi   = 0.9797 

  Initial states:
    l = 19.7873 
    b = 0.0035 
    s = 0.9964 1 1.0024 1.0012

  sigma:  0.0201

     AIC     AICc      BIC 
630.3840 632.2484 658.9821 

Training set error measures:
                    ME     RMSE       MAE       MPE     MAPE
Training set 0.2763901 1.001765 0.7210062 0.3943811 1.422822
                  MASE      ACF1
Training set 0.1706065 0.2671561
hist(residuals(Model3))
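
#optional sketch: collect AICc and training RMSE for the three fitted models
#side by side (same values as reported above); lower is better for both
data.frame(
  model = c("ETS(M,A,N)", "ETS(A,N,N)", "ETS(M,Ad,M)"),
  AICc  = c(model1$aicc, model2$aicc, Model3$aicc),
  RMSE  = c(accuracy(model1)[, "RMSE"],
            accuracy(model2)[, "RMSE"],
            accuracy(Model3)[, "RMSE"])
)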
