library(forecast)
## Registered S3 method overwritten by 'quantmod':
##   method            from
##   as.zoo.data.frame zoo
library("fma")
library(fpp3)
## -- Attaching packages -------------------------------------------- fpp3 0.4.0 --
## v tibble      3.1.2      v tsibble     1.0.1 
## v dplyr       1.0.7      v tsibbledata 0.3.0 
## v tidyr       1.1.3      v feasts      0.2.2 
## v lubridate   1.7.10     v fable       0.3.1 
## v ggplot2     3.3.5
## -- Conflicts ------------------------------------------------- fpp3_conflicts --
## x lubridate::date()      masks base::date()
## x dplyr::filter()        masks stats::filter()
## x fabletools::forecast() masks forecast::forecast()
## x tsibble::intersect()   masks base::intersect()
## x tsibble::interval()    masks lubridate::interval()
## x dplyr::lag()           masks stats::lag()
## x tsibble::setdiff()     masks base::setdiff()
## x tsibble::union()       masks base::union()
library(expsmooth)
library(fpp2)
## 
## Attaching package: 'fpp2'
## The following object is masked from 'package:fpp3':
## 
##     insurance
library(urca)
library(Quandl)
## Loading required package: xts
## Loading required package: zoo
## 
## Attaching package: 'zoo'
## The following object is masked from 'package:tsibble':
## 
##     index
## The following objects are masked from 'package:base':
## 
##     as.Date, as.Date.numeric
## 
## Attaching package: 'xts'
## The following objects are masked from 'package:dplyr':
## 
##     first, last

#1

pig_ses <- ses(pigs, h = 5)
pig_ses$model
## Simple exponential smoothing 
## 
## Call:
##  ses(y = pigs, h = 5) 
## 
##   Smoothing parameters:
##     alpha = 0.2971 
## 
##   Initial states:
##     l = 77260.0561 
## 
##   sigma:  10308.58
## 
##      AIC     AICc      BIC 
## 4462.955 4463.086 4472.665
forecast(pig_ses, h=4)
##          Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## Sep 1995       98816.41 85605.43 112027.4 78611.97 119020.8
## Oct 1995       98816.41 85034.52 112598.3 77738.83 119894.0
## Nov 1995       98816.41 84486.34 113146.5 76900.46 120732.4
## Dec 1995       98816.41 83958.37 113674.4 76092.99 121539.8
pig_ses$mean[1]- 1.96*(sd(pig_ses$residuals))
## [1] 78679.97
pig_ses$mean[1]+ 1.96*(sd(pig_ses$residuals))
## [1] 118952.8

#The manually calculated 95% interval (point forecast +/- 1.96 x sd of the residuals) is very close to the 95% interval produced by forecast().
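#As a quick sketch, the same comparison can be made against the interval stored in the forecast object itself (its $lower matrix is indexed the same way in question 6 below):

c(manual = pig_ses$mean[1] - 1.96 * sd(pig_ses$residuals),
  r_interval = forecast(pig_ses, h = 1)$lower[1, "95%"])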

#2

ct_ses <- function(y, alpha, level){
  # SES recursion: l[t] = alpha * y[t] + (1 - alpha) * l[t-1]
  l <- numeric(length(y))
  l[1] <- alpha * y[1] + (1 - alpha) * level
  for (i in 2:length(y)) {
    l[i] <- alpha * y[i] + (1 - alpha) * l[i - 1]
  }
  # The one-step-ahead forecast is the final smoothed level
  l[length(y)]
}
ct_ses(pigs, .2971, 77260)  # should match the ses() point forecast of ~98816.41

#With the recursion carried on the previous level l[t-1] (rather than on the current observation), the function reproduces the ses() point forecast.

#3

ct_ses_mod <- function(par, y){
  # par = c(alpha, initial level); returns the sum of squared
  # one-step-ahead forecast errors for SES with those parameters.
  alpha <- par[1]
  l <- par[2]
  sse <- 0
  for (i in seq_along(y)) {
    e <- y[i] - l          # one-step error: the forecast is the current level
    sse <- sse + e^2
    l <- alpha * y[i] + (1 - alpha) * l
  }
  sse
}
ct_ses_mod(c(.2971, 77260), pigs)

#A note on the common pitfall in 2 and 3: if the level is overwritten with the observations themselves, the "errors" collapse to zero and the SSE is meaningless; the recursion must carry the previous level forward.
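#For exercise 4, a minimal sketch (assuming the ct_ses_mod(par, y) interface above): let optim() search over alpha and the initial level jointly.

opt <- optim(par = c(0.5, pigs[1]), fn = ct_ses_mod, y = pigs)
opt$par  # should land near ses()'s estimates: alpha ~ 0.2971, l0 ~ 77260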

#5.

head(books)
## Time Series:
## Start = 1 
## End = 6 
## Frequency = 1 
##   Paperback Hardcover
## 1       199       139
## 2       172       128
## 3       111       172
## 4       209       139
## 5       161       191
## 6       119       168
paper = ts(books[,1])
hard = ts(books[,2])
autoplot(books)

#There does appear to be a positive trend for both types of books. It is difficult to determine if there is seasonality, but I don’t think there is.

paper_ses = ses(paper,5)
paper_ses
##    Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 31       207.1097 162.4882 251.7311 138.8670 275.3523
## 32       207.1097 161.8589 252.3604 137.9046 276.3147
## 33       207.1097 161.2382 252.9811 136.9554 277.2639
## 34       207.1097 160.6259 253.5935 136.0188 278.2005
## 35       207.1097 160.0215 254.1979 135.0945 279.1249
hard_ses = ses(hard,5)
hard_ses
##    Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 31       239.5601 197.2026 281.9176 174.7799 304.3403
## 32       239.5601 194.9788 284.1414 171.3788 307.7414
## 33       239.5601 192.8607 286.2595 168.1396 310.9806
## 34       239.5601 190.8347 288.2855 165.0410 314.0792
## 35       239.5601 188.8895 290.2306 162.0662 317.0540
autoplot(books)+ autolayer(paper_ses, PI=FALSE, series="Paperback")+ autolayer(hard_ses, PI=FALSE, series="Hardcover")

summary(hard_ses)
## 
## Forecast method: Simple exponential smoothing
## 
## Model Information:
## Simple exponential smoothing 
## 
## Call:
##  ses(y = hard, h = 5) 
## 
##   Smoothing parameters:
##     alpha = 0.3283 
## 
##   Initial states:
##     l = 149.2861 
## 
##   sigma:  33.0517
## 
##      AIC     AICc      BIC 
## 315.8506 316.7737 320.0542 
## 
## Error measures:
##                    ME     RMSE      MAE      MPE     MAPE      MASE       ACF1
## Training set 9.166735 31.93101 26.77319 2.636189 13.39487 0.7987887 -0.1417763
## 
## Forecasts:
##    Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 31       239.5601 197.2026 281.9176 174.7799 304.3403
## 32       239.5601 194.9788 284.1414 171.3788 307.7414
## 33       239.5601 192.8607 286.2595 168.1396 310.9806
## 34       239.5601 190.8347 288.2855 165.0410 314.0792
## 35       239.5601 188.8895 290.2306 162.0662 317.0540
summary(paper_ses)
## 
## Forecast method: Simple exponential smoothing
## 
## Model Information:
## Simple exponential smoothing 
## 
## Call:
##  ses(y = paper, h = 5) 
## 
##   Smoothing parameters:
##     alpha = 0.1685 
## 
##   Initial states:
##     l = 170.8271 
## 
##   sigma:  34.8183
## 
##      AIC     AICc      BIC 
## 318.9747 319.8978 323.1783 
## 
## Error measures:
##                    ME     RMSE     MAE       MPE     MAPE      MASE       ACF1
## Training set 7.175981 33.63769 27.8431 0.4736071 15.57784 0.7021303 -0.2117522
## 
## Forecasts:
##    Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 31       207.1097 162.4882 251.7311 138.8670 275.3523
## 32       207.1097 161.8589 252.3604 137.9046 276.3147
## 33       207.1097 161.2382 252.9811 136.9554 277.2639
## 34       207.1097 160.6259 253.5935 136.0188 278.2005
## 35       207.1097 160.0215 254.1979 135.0945 279.1249

#6

hard_holt = holt(hard,5)
summary(hard_holt)
## 
## Forecast method: Holt's method
## 
## Model Information:
## Holt's method 
## 
## Call:
##  holt(y = hard, h = 5) 
## 
##   Smoothing parameters:
##     alpha = 1e-04 
##     beta  = 1e-04 
## 
##   Initial states:
##     l = 147.7935 
##     b = 3.303 
## 
##   sigma:  29.2106
## 
##      AIC     AICc      BIC 
## 310.2148 312.7148 317.2208 
## 
## Error measures:
##                      ME     RMSE      MAE       MPE    MAPE      MASE
## Training set -0.1357882 27.19358 23.15557 -2.114792 12.1626 0.6908555
##                     ACF1
## Training set -0.03245186
## 
## Forecasts:
##    Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 31       250.1739 212.7390 287.6087 192.9222 307.4256
## 32       253.4765 216.0416 290.9113 196.2248 310.7282
## 33       256.7791 219.3442 294.2140 199.5274 314.0308
## 34       260.0817 222.6468 297.5166 202.8300 317.3334
## 35       263.3843 225.9494 300.8192 206.1326 320.6360
paper_holt = holt(paper,5)
summary(paper_holt)
## 
## Forecast method: Holt's method
## 
## Model Information:
## Holt's method 
## 
## Call:
##  holt(y = paper, h = 5) 
## 
##   Smoothing parameters:
##     alpha = 1e-04 
##     beta  = 1e-04 
## 
##   Initial states:
##     l = 170.699 
##     b = 1.2621 
## 
##   sigma:  33.4464
## 
##      AIC     AICc      BIC 
## 318.3396 320.8396 325.3456 
## 
## Error measures:
##                     ME     RMSE      MAE       MPE     MAPE      MASE
## Training set -3.717178 31.13692 26.18083 -5.508526 15.58354 0.6602122
##                    ACF1
## Training set -0.1750792
## 
## Forecasts:
##    Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 31       209.4668 166.6035 252.3301 143.9130 275.0205
## 32       210.7177 167.8544 253.5811 145.1640 276.2715
## 33       211.9687 169.1054 254.8320 146.4149 277.5225
## 34       213.2197 170.3564 256.0830 147.6659 278.7735
## 35       214.4707 171.6073 257.3340 148.9169 280.0245

#b&c = The training RMSE for both Holt models is lower than for the SES models (paperback: 31.14 vs 33.64; hardcover: 27.19 vs 31.93), while the AICs are very similar. In absolute terms the MPE favours SES for paperback and Holt for hardcover. Given the lower RMSE, I would go with the Holt method, with the remaining metrics trading off between the two series. Holt's advantage here is its trend component: its forecasts can slope upward with the data, whereas SES produces a flat forecast at the last estimated level. The merit of SES is its simplicity: exponentially decreasing weights controlled by a single smoothing parameter alpha, which can be optimized to improve the fit. Holt's method smooths two components, level and trend; a third, seasonal component only appears with Holt-Winters.
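#The RMSEs quoted above can be pulled out programmatically; a small sketch using accuracy():

rbind(paperback = c(ses = accuracy(paper_ses)["Training set", "RMSE"],
                    holt = accuracy(paper_holt)["Training set", "RMSE"]),
      hardcover = c(ses = accuracy(hard_ses)["Training set", "RMSE"],
                    holt = accuracy(hard_holt)["Training set", "RMSE"]))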

print("Paper SES Lower and Upper")
## [1] "Paper SES Lower and Upper"
round(paper_ses$lower[1, '95%'],2) 
##    95% 
## 138.87
round(paper_ses$upper[1,'95%'],2)
##    95% 
## 275.35
print("Hard SES Lower and Upper")
## [1] "Hard SES Lower and Upper"
round(hard_ses$lower[1, '95%'],2) 
##    95% 
## 174.78
round(hard_ses$upper[1,'95%'],2)
##    95% 
## 304.34
print("Paper Holt Lower and Upper")
## [1] "Paper Holt Lower and Upper"
round(paper_holt$lower[1, '95%'],2) 
##    95% 
## 143.91
round(paper_holt$upper[1,'95%'],2)
##    95% 
## 275.02
print("Hard Holt Lower and Upper")
## [1] "Hard Holt Lower and Upper"
round(hard_holt$lower[1, '95%'],2) 
##    95% 
## 192.92
round(hard_holt$upper[1,'95%'],2)
##    95% 
## 307.43

#The 95% intervals from the two methods are broadly similar for each series, though not identical; the Holt intervals sit slightly higher, reflecting the trend component.
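#A 95% interval can also be built from the training RMSE (assuming normal errors); a sketch for the first paperback Holt forecast:

rmse <- sqrt(mean(paper_holt$residuals^2))
paper_holt$mean[1] + c(-1.96, 1.96) * rmse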

#7

holt1 = holt(eggs, h=100)
holt2 = holt(eggs, h=100, damped=TRUE)
# Note: holt() has no seasonal argument (eggs is annual data), so the extra
# argument below is silently ignored and holt3 is identical to holt1.
holt3 = holt(eggs, h=100, seasonal="additive")
autoplot(eggs)+autolayer(holt1, PI=FALSE, series="Holt")+autolayer(holt2, PI=FALSE, series="Damped")+autolayer(holt3, PI=FALSE, series="Holt (seasonal ignored)")

summary(holt1)
## 
## Forecast method: Holt's method
## 
## Model Information:
## Holt's method 
## 
## Call:
##  holt(y = eggs, h = 100) 
## 
##   Smoothing parameters:
##     alpha = 0.8124 
##     beta  = 1e-04 
## 
##   Initial states:
##     l = 314.7232 
##     b = -2.7222 
## 
##   sigma:  27.1665
## 
##      AIC     AICc      BIC 
## 1053.755 1054.437 1066.472 
## 
## Error measures:
##                      ME     RMSE      MAE       MPE     MAPE      MASE
## Training set 0.04499087 26.58219 19.18491 -1.142201 9.653791 0.9463626
##                    ACF1
## Training set 0.01348202
## 
## Forecasts:
##      Point Forecast       Lo 80     Hi 80       Lo 95    Hi 95
## 1994    59.78553313   24.970286  94.60078    6.540207 113.0309
## 1995    57.06372643   12.206005 101.92145  -11.540238 125.6677
## 1996    54.34191973    1.308668 107.37517  -26.765440 135.4493
## 1997    51.62011302   -8.488401 111.72863  -40.307926 143.5482
## 1998    48.89830632  -17.537664 115.33428  -52.706742 150.5034
## 1999    46.17649962  -26.035964 118.38896  -64.262933 156.6159
## 2000    43.45469292  -34.106500 121.01589  -75.164916 162.0743
## 2001    40.73288622  -41.832449 123.29822  -85.539898 167.0057
## 2002    38.01107951  -49.273098 125.29526  -95.478551 171.5007
## 2003    35.28927281  -56.472472 127.05102 -105.048206 175.6268
## 2004    32.56746611  -63.464327 128.59926 -114.300487 179.4354
## 2005    29.84565941  -70.275216 129.96654 -123.276007 182.9673
## 2006    27.12385271  -76.926479 131.17418 -132.007398 186.2551
## 2007    24.40204600  -83.435566 132.23966 -140.521350 189.3254
## 2008    21.68023930  -89.816966 133.17744 -148.840022 192.2005
## 2009    18.95843260  -96.082866 133.99973 -156.982051 194.8989
## 2010    16.23662590 -102.243631 134.71688 -164.963291 197.4365
## 2011    13.51481920 -108.308165 135.33780 -172.797358 199.8270
## 2012    10.79301249 -114.284184 135.87021 -180.496053 202.0821
## 2013     8.07120579 -120.178426 136.32084 -188.069681 204.2121
## 2014     5.34939909 -125.996817 136.69562 -195.527304 206.2261
## 2015     2.62759239 -131.744601 136.99979 -202.876944 208.1321
## 2016    -0.09421431 -137.426446 137.23802 -210.125737 209.9373
## 2017    -2.81602102 -143.046526 137.41448 -217.280071 211.6480
## 2018    -5.53782772 -148.608596 137.53294 -224.345686 213.2700
## 2019    -8.25963442 -154.116046 137.59678 -231.327766 214.8085
## 2020   -10.98144112 -159.571946 137.60906 -238.231008 216.2681
## 2021   -13.70324782 -164.979092 137.57260 -245.059687 217.6532
## 2022   -16.42505453 -170.340036 137.48993 -251.817706 218.9676
## 2023   -19.14686123 -175.657116 137.36339 -258.508640 220.2149
## 2024   -21.86866793 -180.932478 137.19514 -265.135773 221.3984
## 2025   -24.59047463 -186.168101 136.98715 -271.702130 222.5212
## 2026   -27.31228133 -191.365811 136.74125 -278.210504 223.5859
## 2027   -30.03408804 -196.527300 136.45912 -284.663482 224.5953
## 2028   -32.75589474 -201.654137 136.14235 -291.063466 225.5517
## 2029   -35.47770144 -206.747783 135.79238 -297.412688 226.4573
## 2030   -38.19950814 -211.809598 135.41058 -303.713228 227.3142
## 2031   -40.92131484 -216.840851 134.99822 -309.967029 228.1244
## 2032   -43.64312155 -221.842733 134.55649 -316.175908 228.8897
## 2033   -46.36492825 -226.816355 134.08650 -322.341569 229.6117
## 2034   -49.08673495 -231.762762 133.58929 -328.465610 230.2921
## 2035   -51.80854165 -236.682939 133.06586 -334.549533 230.9324
## 2036   -54.53034835 -241.577809 132.51711 -340.594753 231.5341
## 2037   -57.25215506 -246.448244 131.94393 -346.602603 232.0983
## 2038   -59.97396176 -251.295068 131.34714 -352.574344 232.6264
## 2039   -62.69576846 -256.119059 130.72752 -358.511164 233.1196
## 2040   -65.41757516 -260.920954 130.08580 -364.414190 233.5790
## 2041   -68.13938186 -265.701450 129.42269 -370.284491 234.0057
## 2042   -70.86118857 -270.461210 128.73883 -376.123079 234.4007
## 2043   -73.58299527 -275.200863 128.03487 -381.930915 234.7649
## 2044   -76.30480197 -279.921006 127.31140 -387.708914 235.0993
## 2045   -79.02660867 -284.622209 126.56899 -393.457946 235.4047
## 2046   -81.74841537 -289.305013 125.80818 -399.178839 235.6820
## 2047   -84.47022208 -293.969936 125.02949 -404.872385 235.9319
## 2048   -87.19202878 -298.617469 124.23341 -410.539337 236.1553
## 2049   -89.91383548 -303.248085 123.42041 -416.180415 236.3527
## 2050   -92.63564218 -307.862233 122.59095 -421.796308 236.5250
## 2051   -95.35744888 -312.460345 121.74545 -427.387675 236.6728
## 2052   -98.07925559 -317.042831 120.88432 -432.955146 236.7966
## 2053  -100.80106229 -321.610088 120.00796 -438.499326 236.8972
## 2054  -103.52286899 -326.162494 119.11676 -444.020793 236.9751
## 2055  -106.24467569 -330.700413 118.21106 -449.520103 237.0308
## 2056  -108.96648239 -335.224193 117.29123 -454.997790 237.0648
## 2057  -111.68828910 -339.734170 116.35759 -460.454367 237.0778
## 2058  -114.41009580 -344.230666 115.41047 -465.890327 237.0701
## 2059  -117.13190250 -348.713991 114.45019 -471.306143 237.0423
## 2060  -119.85370920 -353.184443 113.47702 -476.702272 236.9949
## 2061  -122.57551590 -357.642310 112.49128 -482.079153 236.9281
## 2062  -125.29732261 -362.087868 111.49322 -487.437211 236.8426
## 2063  -128.01912931 -366.521384 110.48313 -492.776852 236.7386
## 2064  -130.74093601 -370.943117 109.46124 -498.098471 236.6166
## 2065  -133.46274271 -375.353314 108.42783 -503.402447 236.4770
## 2066  -136.18454941 -379.752215 107.38312 -508.689149 236.3200
## 2067  -138.90635612 -384.140052 106.32734 -513.958929 236.1462
## 2068  -141.62816282 -388.517050 105.26072 -519.212132 235.9558
## 2069  -144.34996952 -392.883424 104.18349 -524.449088 235.7491
## 2070  -147.07177622 -397.239385 103.09583 -529.670117 235.5266
## 2071  -149.79358292 -401.585134 101.99797 -534.875530 235.2884
## 2072  -152.51538963 -405.920870 100.89009 -540.065628 235.0348
## 2073  -155.23719633 -410.246781  99.77239 -545.240700 234.7663
## 2074  -157.95900303 -414.563051  98.64505 -550.401029 234.4830
## 2075  -160.68080973 -418.869861  97.50824 -555.546889 234.1853
## 2076  -163.40261643 -423.167382  96.36215 -560.678543 233.8733
## 2077  -166.12442314 -427.455784  95.20694 -565.796249 233.5474
## 2078  -168.84622984 -431.735228  94.04277 -570.900257 233.2078
## 2079  -171.56803654 -436.005874  92.86980 -575.990809 232.8547
## 2080  -174.28984324 -440.267874  91.68819 -581.068139 232.4885
## 2081  -177.01164994 -444.521379  90.49808 -586.132476 232.1092
## 2082  -179.73345665 -448.766534  89.29962 -591.184043 231.7171
## 2083  -182.45526335 -453.003480  88.09295 -596.223054 231.3125
## 2084  -185.17707005 -457.232353  86.87821 -601.249720 230.8956
## 2085  -187.89887675 -461.453288  85.65553 -606.264245 230.4665
## 2086  -190.62068345 -465.666414  84.42505 -611.266828 230.0255
## 2087  -193.34249016 -469.871857  83.18688 -616.257661 229.5727
## 2088  -196.06429686 -474.069741  81.94115 -621.236934 229.1083
## 2089  -198.78610356 -478.260186  80.68798 -626.204828 228.6326
## 2090  -201.50791026 -482.443308  79.42749 -631.161524 228.1457
## 2091  -204.22971696 -486.619220  78.15979 -636.107193 227.6478
## 2092  -206.95152367 -490.788035  76.88499 -641.042007 227.1390
## 2093  -209.67333037 -494.949859  75.60320 -645.966131 226.6195
summary(holt2)
## 
## Forecast method: Damped Holt's method
## 
## Model Information:
## Damped Holt's method 
## 
## Call:
##  holt(y = eggs, h = 100, damped = TRUE) 
## 
##   Smoothing parameters:
##     alpha = 0.8462 
##     beta  = 0.004 
##     phi   = 0.8 
## 
##   Initial states:
##     l = 276.9842 
##     b = 4.9966 
## 
##   sigma:  27.2755
## 
##      AIC     AICc      BIC 
## 1055.458 1056.423 1070.718 
## 
## Error measures:
##                     ME     RMSE     MAE       MPE     MAPE      MASE
## Training set -2.891496 26.54019 19.2795 -2.907633 10.01894 0.9510287
##                      ACF1
## Training set -0.003195358
## 
## Forecasts:
##      Point Forecast        Lo 80     Hi 80       Lo 95    Hi 95
## 1994       62.84884   27.8938665  97.80381    9.389822 116.3079
## 1995       62.79968   16.9363788 108.66299   -7.342188 132.9416
## 1996       62.76036    8.0760016 117.44472  -20.872149 146.3929
## 1997       62.72890    0.4263979 125.03140  -32.554554 158.0124
## 1998       62.70373   -6.4067675 131.81423  -42.991656 168.3991
## 1999       62.68360  -12.6402008 138.00740  -52.514212 177.8814
## 2000       62.66749  -18.4083657 143.74335  -61.327332 186.6623
## 2001       62.65461  -23.8016345 149.11085  -69.568803 194.8780
## 2002       62.64430  -28.8842950 154.17289  -77.336605 202.6252
## 2003       62.63605  -33.7040638 158.97616  -84.703440 209.9755
## 2004       62.62945  -38.2975438 163.55645  -91.725068 216.9840
## 2005       62.62417  -42.6935603 167.94191  -98.445402 223.6938
## 2006       62.61995  -46.9153057 172.15521 -104.899769 230.1397
## 2007       62.61657  -50.9817746 176.21492 -111.117108 236.3503
## 2008       62.61387  -54.9087594 180.13650 -117.121483 242.3492
## 2009       62.61171  -58.7095595 183.93298 -122.933160 248.1566
## 2010       62.60998  -62.3954996 187.61546 -128.569404 253.7894
## 2011       62.60860  -65.9763173 191.19351 -134.045059 259.2623
## 2012       62.60749  -69.4604571 194.67544 -139.373005 264.5880
## 2013       62.60660  -72.8552982 198.06851 -144.564498 269.7777
## 2014       62.60590  -76.1673328 201.37913 -149.629443 274.8412
## 2015       62.60533  -79.4023080 204.61297 -154.576610 279.7873
## 2016       62.60488  -82.5653397 207.77509 -159.413810 284.6236
## 2017       62.60451  -85.6610050 210.87003 -164.148030 289.3571
## 2018       62.60422  -88.6934183 213.90187 -168.785552 293.9940
## 2019       62.60399  -91.6662933 216.87428 -173.332049 298.5400
## 2020       62.60381  -94.5829961 219.79061 -177.792663 303.0003
## 2021       62.60366  -97.4465882 222.65390 -182.172070 307.3794
## 2022       62.60354 -100.2598639 225.46694 -186.474541 311.6816
## 2023       62.60344 -103.0253816 228.23227 -190.703985 315.9109
## 2024       62.60337 -105.7454907 230.95223 -194.863993 320.0707
## 2025       62.60331 -108.4223544 233.62897 -198.957870 324.1645
## 2026       62.60326 -111.0579701 236.26449 -202.988671 328.1952
## 2027       62.60322 -113.6541863 238.86062 -206.959220 332.1657
## 2028       62.60319 -116.2127174 241.41909 -210.872140 336.0785
## 2029       62.60316 -118.7351575 243.94148 -214.729866 339.9362
## 2030       62.60314 -121.2229914 246.42928 -218.534669 343.7410
## 2031       62.60313 -123.6776050 248.88386 -222.288668 347.4949
## 2032       62.60311 -126.1002941 251.30652 -225.993844 351.2001
## 2033       62.60310 -128.4922724 253.69848 -229.652054 354.8583
## 2034       62.60310 -130.8546789 256.06087 -233.265039 358.4712
## 2035       62.60309 -133.1885838 258.39476 -236.834435 362.0406
## 2036       62.60308 -135.4949941 260.70116 -240.361782 365.5679
## 2037       62.60308 -137.7748593 262.98102 -243.848533 369.0547
## 2038       62.60308 -140.0290751 265.23523 -247.296057 372.5022
## 2039       62.60307 -142.2584882 267.46464 -250.705648 375.9118
## 2040       62.60307 -144.4638997 269.67004 -254.078533 379.2847
## 2041       62.60307 -146.6460685 271.85221 -257.415871 382.6220
## 2042       62.60307 -148.8057141 274.01185 -260.718763 385.9249
## 2043       62.60307 -150.9435200 276.14965 -263.988255 389.1944
## 2044       62.60307 -153.0601355 278.26627 -267.225338 392.4315
## 2045       62.60307 -155.1561786 280.36231 -270.430959 395.6371
## 2046       62.60307 -157.2322378 282.43837 -273.606018 398.8121
## 2047       62.60306 -159.2888738 284.49500 -276.751371 401.9575
## 2048       62.60306 -161.3266220 286.53275 -279.867837 405.0740
## 2049       62.60306 -163.3459932 288.55212 -282.956199 408.1623
## 2050       62.60306 -165.3474759 290.55360 -286.017203 411.2233
## 2051       62.60306 -167.3315372 292.53766 -289.051562 414.2577
## 2052       62.60306 -169.2986243 294.50475 -292.059962 417.2661
## 2053       62.60306 -171.2491656 296.45529 -295.043058 420.2492
## 2054       62.60306 -173.1835715 298.38970 -298.001476 423.2076
## 2055       62.60306 -175.1022361 300.30836 -300.935821 426.1419
## 2056       62.60306 -177.0055375 302.21166 -303.846669 429.0528
## 2057       62.60306 -178.8938390 304.09997 -306.734577 431.9407
## 2058       62.60306 -180.7674896 305.97362 -309.600078 434.8062
## 2059       62.60306 -182.6268253 307.83295 -312.443686 437.6498
## 2060       62.60306 -184.4721691 309.67830 -315.265896 440.4720
## 2061       62.60306 -186.3038323 311.50996 -318.067183 443.2733
## 2062       62.60306 -188.1221147 313.32824 -320.848006 446.0541
## 2063       62.60306 -189.9273054 315.13343 -323.608807 448.8149
## 2064       62.60306 -191.7196831 316.92581 -326.350012 451.5561
## 2065       62.60306 -193.4995169 318.70564 -329.072033 454.2782
## 2066       62.60306 -195.2670664 320.47319 -331.775267 456.9814
## 2067       62.60306 -197.0225826 322.22871 -334.460097 459.6662
## 2068       62.60306 -198.7663080 323.97243 -337.126895 462.3330
## 2069       62.60306 -200.4984769 325.70460 -339.776019 464.9821
## 2070       62.60306 -202.2193162 327.42544 -342.407816 467.6139
## 2071       62.60306 -203.9290453 329.13517 -345.022621 470.2287
## 2072       62.60306 -205.6278766 330.83400 -347.620759 472.8269
## 2073       62.60306 -207.3160160 332.52214 -350.202545 475.4087
## 2074       62.60306 -208.9936627 334.19979 -352.768284 477.9744
## 2075       62.60306 -210.6610101 335.86714 -355.318272 480.5244
## 2076       62.60306 -212.3182455 337.52437 -357.852795 483.0589
## 2077       62.60306 -213.9655507 339.17168 -360.372131 485.5783
## 2078       62.60306 -215.6031021 340.80923 -362.876550 488.0827
## 2079       62.60306 -217.2310709 342.43720 -365.366313 490.5724
## 2080       62.60306 -218.8496235 344.05575 -367.841676 493.0478
## 2081       62.60306 -220.4589213 345.66505 -370.302884 495.5090
## 2082       62.60306 -222.0591213 347.26525 -372.750179 497.9563
## 2083       62.60306 -223.6503761 348.85650 -375.183793 500.3899
## 2084       62.60306 -225.2328340 350.43896 -377.603954 502.8101
## 2085       62.60306 -226.8066394 352.01277 -380.010881 505.2170
## 2086       62.60306 -228.3719326 353.57806 -382.404791 507.6109
## 2087       62.60306 -229.9288503 355.13498 -384.785891 509.9920
## 2088       62.60306 -231.4775255 356.68365 -387.154385 512.3605
## 2089       62.60306 -233.0180877 358.22421 -389.510472 514.7166
## 2090       62.60306 -234.5506631 359.75679 -391.854344 517.0605
## 2091       62.60306 -236.0753747 361.28150 -394.186189 519.3923
## 2092       62.60306 -237.5923423 362.79847 -396.506191 521.7123
## 2093       62.60306 -239.1016828 364.30781 -398.814528 524.0207
#summary(holt3) is omitted: its fitted parameters, accuracy measures, and forecasts match summary(holt1) exactly, confirming that the seasonal argument was ignored.

#The damped method offers the best (lowest) training RMSE, 26.54 vs 26.58, and its point forecasts stay positive rather than trending below zero.
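#One more variant worth trying (a sketch, not fit above): a Box-Cox transform keeps the point forecasts positive, which matters here since the plain Holt forecasts drop below zero from 2016 onward.

holt_bc <- holt(eggs, h = 100, lambda = BoxCox.lambda(eggs))
autoplot(eggs) + autolayer(holt_bc, PI = FALSE, series = "Holt, Box-Cox")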

#8.

retail = readxl::read_excel("C:\\Users\\cthom\\OneDrive\\Boston College\\Econometrics\\retail.xlsx", skip=1)
retail
## # A tibble: 381 x 190
##    `Series ID`         A3349335T A3349627V A3349338X A3349398A A3349468W
##    <dttm>                  <dbl>     <dbl>     <dbl>     <dbl>     <dbl>
##  1 1982-04-01 00:00:00      303.      41.7      63.9      409.      65.8
##  2 1982-05-01 00:00:00      298.      43.1      64        405.      65.8
##  3 1982-06-01 00:00:00      298       40.3      62.7      401       62.3
##  4 1982-07-01 00:00:00      308.      40.9      65.6      414.      68.2
##  5 1982-08-01 00:00:00      299.      42.1      62.6      404.      66  
##  6 1982-09-01 00:00:00      305.      42        64.4      412.      62.3
##  7 1982-10-01 00:00:00      318       46.1      66        430.      66.2
##  8 1982-11-01 00:00:00      334.      46.5      65.3      446.      68.9
##  9 1982-12-01 00:00:00      390.      53.8      77.9      521.      90.8
## 10 1983-01-01 00:00:00      311.      43.8      65.1      420.      58  
## # ... with 371 more rows, and 184 more variables: A3349336V <dbl>,
## #   A3349337W <dbl>, A3349397X <dbl>, A3349399C <dbl>, A3349874C <dbl>,
## #   A3349871W <dbl>, A3349790V <dbl>, A3349556W <dbl>, A3349791W <dbl>,
## #   A3349401C <dbl>, A3349873A <dbl>, A3349872X <dbl>, A3349709X <dbl>,
## #   A3349792X <dbl>, A3349789K <dbl>, A3349555V <dbl>, A3349565X <dbl>,
## #   A3349414R <dbl>, A3349799R <dbl>, A3349642T <dbl>, A3349413L <dbl>,
## #   A3349564W <dbl>, A3349416V <dbl>, A3349643V <dbl>, A3349483V <dbl>,
## #   A3349722T <dbl>, A3349727C <dbl>, A3349641R <dbl>, A3349639C <dbl>,
## #   A3349415T <dbl>, A3349349F <dbl>, A3349563V <dbl>, A3349350R <dbl>,
## #   A3349640L <dbl>, A3349566A <dbl>, A3349417W <dbl>, A3349352V <dbl>,
## #   A3349882C <dbl>, A3349561R <dbl>, A3349883F <dbl>, A3349721R <dbl>,
## #   A3349478A <dbl>, A3349637X <dbl>, A3349479C <dbl>, A3349797K <dbl>,
## #   A3349477X <dbl>, A3349719C <dbl>, A3349884J <dbl>, A3349562T <dbl>,
## #   A3349348C <dbl>, A3349480L <dbl>, A3349476W <dbl>, A3349881A <dbl>,
## #   A3349410F <dbl>, A3349481R <dbl>, A3349718A <dbl>, A3349411J <dbl>,
## #   A3349638A <dbl>, A3349654A <dbl>, A3349499L <dbl>, A3349902A <dbl>,
## #   A3349432V <dbl>, A3349656F <dbl>, A3349361W <dbl>, A3349501L <dbl>,
## #   A3349503T <dbl>, A3349360V <dbl>, A3349903C <dbl>, A3349905J <dbl>,
## #   A3349658K <dbl>, A3349575C <dbl>, A3349428C <dbl>, A3349500K <dbl>,
## #   A3349577J <dbl>, A3349433W <dbl>, A3349576F <dbl>, A3349574A <dbl>,
## #   A3349816F <dbl>, A3349815C <dbl>, A3349744F <dbl>, A3349823C <dbl>,
## #   A3349508C <dbl>, A3349742A <dbl>, A3349661X <dbl>, A3349660W <dbl>,
## #   A3349909T <dbl>, A3349824F <dbl>, A3349507A <dbl>, A3349580W <dbl>,
## #   A3349825J <dbl>, A3349434X <dbl>, A3349822A <dbl>, A3349821X <dbl>,
## #   A3349581X <dbl>, A3349908R <dbl>, A3349743C <dbl>, A3349910A <dbl>,
## #   A3349435A <dbl>, A3349365F <dbl>, A3349746K <dbl>, ...
retail_ts = ts(retail[["A3349873A"]], frequency=12, start=c(1982,4))
# window() keeps the original dates; rebuilding the ts with start=c(2011,1)
# would mislabel the whole series as the test set.
train = window(retail_ts, end=c(2010,12))
retail_test = window(retail_ts, start=c(2011,1))
autoplot(train)

#Multiplicative is necessary as the magnitude of the seasonal component appears dependent on the trend level.
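#A season plot makes the widening seasonal swings easier to see (sketch):

ggseasonplot(train)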

retail_holt = hw(train,damped=TRUE, h=36)
retail_holt2 = hw(train,seasonal="multiplicative", h=36)


autoplot(train)+autolayer(retail_holt, PI=FALSE, series="Damped additive")+ autolayer(retail_holt2, PI=FALSE, series="Multiplicative")

summary(retail_holt)
## 
## Forecast method: Damped Holt-Winters' additive method
## 
## Model Information:
## Damped Holt-Winters' additive method 
## 
## Call:
##  hw(y = train, h = 36, damped = TRUE) 
## 
##   Smoothing parameters:
##     alpha = 0.43 
##     beta  = 1e-04 
##     gamma = 0.3978 
##     phi   = 0.9799 
## 
##   Initial states:
##     l = 67.3816 
##     b = 0.8563 
##     s = -12.887 -21.0597 -12.9414 104.2212 18.1768 3.7531
##            -4.8436 -11.1564 -16.6849 -21.3847 -7.9789 -17.2146
## 
##   sigma:  12.8761
## 
##      AIC     AICc      BIC 
## 3797.795 3799.893 3866.979 
## 
## Error measures:
##                    ME     RMSE      MAE       MPE     MAPE      MASE      ACF1
## Training set 1.020543 12.55482 9.142844 0.5598927 5.200267 0.5729764 0.1885705
## 
## Forecasts:
##          Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## Jan 2011       264.1705 247.6691 280.6718 238.9338 289.4071
## Feb 2011       247.0081 229.0454 264.9708 219.5366 274.4797
## Mar 2011       263.7690 244.4545 283.0834 234.2300 293.3079
## Apr 2011       251.3965 230.8183 271.9748 219.9248 282.8683
## May 2011       262.0711 240.3017 283.8406 228.7777 295.3646
## Jun 2011       246.3067 223.4075 269.2060 211.2853 281.3281
## Jul 2011       254.1998 230.2234 278.1762 217.5310 290.8686
## Aug 2011       257.6087 232.6010 282.6164 219.3627 295.8547
## Sep 2011       263.8282 237.8296 289.8267 224.0668 303.5896
## Oct 2011       272.3911 245.4376 299.3445 231.1693 313.6128
## Nov 2011       305.0157 277.1396 332.8918 262.3829 347.6485
## Dec 2011       405.3099 376.5403 434.0794 361.3107 449.3091
## Jan 2012       264.1478 232.2913 296.0042 215.4275 312.8680
## Feb 2012       246.9859 214.3441 279.6277 197.0646 296.9072
## Mar 2012       263.7472 230.3382 297.1561 212.6525 314.8418
## Apr 2012       251.3752 217.2159 285.5344 199.1331 303.6172
## May 2012       262.0502 227.1565 296.9439 208.6850 315.4155
## Jun 2012       246.2862 210.6730 281.8994 191.8205 300.7519
## Jul 2012       254.1797 217.8609 290.4985 198.6349 309.7245
## Aug 2012       257.5890 220.5778 294.6002 200.9853 314.1927
## Sep 2012       263.8089 226.1178 301.5000 206.1653 321.4524
## Oct 2012       272.3721 234.0129 310.7313 213.7068 331.0375
## Nov 2012       304.9971 265.9810 344.0132 245.3272 364.6671
## Dec 2012       405.2917 365.6294 444.9540 344.6334 465.9500
## Jan 2013       264.1300 222.1693 306.0906 199.9567 328.3032
## Feb 2013       246.9684 204.4058 289.5310 181.8746 312.0623
## Mar 2013       263.7301 220.5737 306.8864 197.7282 329.7320
## Apr 2013       251.3584 207.6162 295.1006 184.4605 318.2564
## May 2013       262.0338 217.7133 306.3543 194.2514 329.8162
## Jun 2013       246.2701 201.3786 291.1617 177.6144 314.9258
## Jul 2013       254.1640 208.7084 299.6195 184.6456 323.6823
## Aug 2013       257.5736 211.5607 303.5864 187.2030 327.9442
## Sep 2013       263.7937 217.2301 310.3574 192.5808 335.0067
## Oct 2013       272.3573 225.2492 319.4654 200.3117 344.4030
## Nov 2013       304.9826 257.3361 352.6291 232.1135 377.8517
## Dec 2013       405.2774 357.0984 453.4565 331.5939 478.9610
summary(retail_holt2)
## 
## Forecast method: Holt-Winters' multiplicative method
## 
## Model Information:
## Holt-Winters' multiplicative method 
## 
## Call:
##  hw(y = train, h = 36, seasonal = "multiplicative") 
## 
##   Smoothing parameters:
##     alpha = 0.5915 
##     beta  = 1e-04 
##     gamma = 0.102 
## 
##   Initial states:
##     l = 62.8598 
##     b = 0.5652 
##     s = 0.9396 0.939 0.9122 1.4705 1.065 1.0179
##            0.9869 0.9426 0.9371 0.8902 1.01 0.8889
## 
##   sigma:  0.0438
## 
##      AIC     AICc      BIC 
## 3473.119 3474.991 3538.460 
## 
## Error measures:
##                      ME     RMSE      MAE         MPE     MAPE      MASE
## Training set 0.03021223 9.107356 6.553533 0.001995484 3.293399 0.4107058
##                    ACF1
## Training set 0.02752875
## 
## Forecasts:
##          Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## Jan 2011       259.0844 244.5405 273.6283 236.8414 281.3274
## Feb 2011       247.0492 230.9401 263.1582 222.4125 271.6858
## Mar 2011       260.0161 240.9994 279.0328 230.9325 289.0996
## Apr 2011       253.5505 233.1988 273.9022 222.4253 284.6758
## May 2011       266.1542 243.0536 289.2548 230.8249 301.4835
## Jun 2011       251.8347 228.4530 275.2165 216.0755 287.5940
## Jul 2011       258.4682 233.0067 283.9298 219.5281 297.4083
## Aug 2011       265.4052 237.8431 292.9673 223.2526 307.5578
## Sep 2011       273.7663 243.9497 303.5829 228.1658 319.3668
## Oct 2011       284.4787 252.1230 316.8343 234.9950 333.9624
## Nov 2011       307.7085 271.2909 344.1260 252.0126 363.4043
## Dec 2011       421.1372 369.4304 472.8439 342.0585 500.2158
## Jan 2012       265.5514 231.3917 299.7112 213.3086 317.7943
## Feb 2012       253.2031 219.6031 286.8032 201.8163 304.5899
## Mar 2012       266.4797 230.0704 302.8891 210.7965 322.1630
## Apr 2012       259.8406 223.3483 296.3328 204.0305 315.6506
## May 2012       272.7434 233.4303 312.0566 212.6192 332.8677
## Jun 2012       258.0567 219.9325 296.1809 199.7508 316.3627
## Jul 2012       264.8411 224.7875 304.8948 203.5843 326.0979
## Aug 2012       271.9359 229.8814 313.9903 207.6191 336.2526
## Sep 2012       280.4890 236.1780 324.8000 212.7212 348.2568
## Oct 2012       291.4503 244.4601 338.4405 219.5850 363.3156
## Nov 2012       315.2342 263.4074 367.0609 235.9720 394.4963
## Dec 2012       431.4162 359.1469 503.6856 320.8898 541.9427
## Jan 2013       272.0193 225.3028 318.7358 200.5726 343.4660
## Feb 2013       259.3578 214.0472 304.6685 190.0611 328.6546
## Mar 2013       272.9442 224.4666 321.4218 198.8042 347.0843
## Apr 2013       266.1314 218.1048 314.1579 192.6811 339.5816
## May 2013       279.3335 228.1423 330.5246 201.0433 357.6236
## Jun 2013       264.2795 215.1202 313.4388 189.0968 339.4621
## Jul 2013       271.2148 220.0322 322.3973 192.9378 349.4918
## Aug 2013       278.4673 225.1758 331.7588 196.9650 359.9695
## Sep 2013       287.2126 231.4964 342.9287 202.0021 372.4230
## Oct 2013       298.4228 239.7640 357.0816 208.7119 388.1337
## Nov 2013       322.7608 258.5003 387.0213 224.4828 421.0388
## Dec 2013       441.6966 352.6542 530.7391 305.5180 577.8753

#I prefer the multiplicative method: it appears to fit the data better graphically, and it also has the lower training RMSE (9.11 vs 12.55) and AICc.

plot(retail_holt2$residuals)

#Residuals look like white noise.
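#checkresiduals() backs up the eyeball check with an ACF plot and a Ljung-Box test (a sketch; it takes the forecast object directly):

checkresiduals(retail_holt2)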

forecast::accuracy(retail_holt2, retail_test)

#9

retail_box <- BoxCox.lambda(train)
retail_stl = stlf(train, lambda=retail_box)
retail_ets= ets(seasadj(decompose(train, "multiplicative")))

autoplot(train)+autolayer(retail_stl, PI=FALSE, series="stlf") + autolayer(forecast(retail_ets, h=24), PI=FALSE, series="ets")

summary(retail_stl)
## 
## Forecast method: STL +  ETS(M,A,N)
## 
## Model Information:
## ETS(M,A,N) 
## 
## Call:
##  ets(y = na.interp(x), model = etsmodel, allow.multiplicative.trend = allow.multiplicative.trend) 
## 
##   Smoothing parameters:
##     alpha = 0.6233 
##     beta  = 0.005 
## 
##   Initial states:
##     l = 6.512 
##     b = 0.0175 
## 
##   sigma:  0.0113
## 
##      AIC     AICc      BIC 
## 450.3140 450.4910 469.5317 
## 
## Error measures:
##                      ME    RMSE      MAE       MPE     MAPE      MASE      ACF1
## Training set -0.6407263 7.98403 5.744013 -0.295841 2.831444 0.3599737 0.0223297
## 
## Forecasts:
##          Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## Jan 2011       259.6728 247.0973 272.7566 240.6408 279.8936
## Feb 2011       246.7769 232.5689 261.6729 225.3178 269.8452
## Mar 2011       261.0024 244.1827 278.7389 235.6376 288.5117
## Apr 2011       252.0341 233.9693 271.2009 224.8359 281.8101
## May 2011       264.6804 244.2462 286.4636 233.9531 298.5634
## Jun 2011       251.8326 230.7338 274.4514 220.1533 287.0680
## Jul 2011       260.1186 237.0369 284.9692 225.5011 298.8744
## Aug 2011       267.1748 242.2054 294.1679 229.7666 309.3175
## Sep 2011       274.5268 247.6512 303.6929 234.3039 320.1086
## Oct 2011       283.2186 254.3237 314.6893 240.0146 332.4490
## Nov 2011       306.3544 274.1901 341.4769 258.2951 361.3352
## Dec 2011       419.1673 375.9813 466.2442 354.6102 492.8279
## Jan 2012       265.2888 234.3426 299.4301 219.1749 318.8791
## Feb 2012       252.1685 221.4013 286.2743 206.3790 305.7704
## Mar 2012       266.6413 233.3552 303.6322 217.1359 324.8163
## Apr 2012       257.5175 224.1224 294.7907 207.9068 316.2037
## May 2012       270.3829 234.5804 310.4411 217.2303 333.4950
## Jun 2012       257.3124 221.9286 297.0846 204.8450 320.0499
## Jul 2012       265.7423 228.4146 307.8114 210.4316 332.1500
## Aug 2012       272.9202 233.7656 317.1687 214.9439 342.8182
## Sep 2012       280.3984 239.3576 326.9008 219.6716 353.9081
## Oct 2012       289.2386 246.1213 338.2143 225.4808 366.7086
## Nov 2012       312.7649 265.7327 366.2510 243.2400 397.3961
## Dec 2012       427.4063 365.3758 497.6127 335.5945 538.3538
retail_ets %>% summary() 
## ETS(M,A,N) 
## 
## Call:
##  ets(y = seasadj(decompose(train, "multiplicative"))) 
## 
##   Smoothing parameters:
##     alpha = 0.5989 
##     beta  = 1e-04 
## 
##   Initial states:
##     l = 65.5579 
##     b = 0.7729 
## 
##   sigma:  0.0403
## 
##      AIC     AICc      BIC 
## 3412.805 3412.982 3432.023 
## 
## Training set error measures:
##                      ME     RMSE      MAE        MPE     MAPE      MASE
## Training set -0.2829213 8.551268 6.201346 -0.2055279 3.091856 0.3886063
##                    ACF1
## Training set 0.02876641

#The STL + ETS model from stlf() was the most accurate of the three models on the training set (RMSE 7.98, vs 8.55 for ets() on the seasonally adjusted series and 9.11 for Holt-Winters above).
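#Pulling the three training RMSEs into one vector (a sketch; retail_holt2 is the Holt-Winters model from question 8):

c(stlf = accuracy(retail_stl)["Training set", "RMSE"],
  ets_seasadj = accuracy(retail_ets)["Training set", "RMSE"],
  hw_mult = accuracy(retail_holt2)["Training set", "RMSE"])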

#10

autoplot(ukcars)

#There appears to be seasonality in the data, but no clear trend. Recessions drove production down, but such movements are cyclical rather than seasonal: they do not recur at a fixed period.

ukcars %>% stl(s.window="periodic") %>% autoplot()

uk_cars_damped <- ukcars %>% stlf(h = 8, etsmodel = "AAN", damped = TRUE)
summary(uk_cars_damped)
## 
## Forecast method: STL +  ETS(A,Ad,N)
## 
## Model Information:
## ETS(A,Ad,N) 
## 
## Call:
##  ets(y = na.interp(x), model = etsmodel, damped = TRUE, allow.multiplicative.trend = allow.multiplicative.trend) 
## 
##   Smoothing parameters:
##     alpha = 0.763 
##     beta  = 0.0044 
##     phi   = 0.8 
## 
##   Initial states:
##     l = 308.4749 
##     b = 0.5404 
## 
##   sigma:  23.3371
## 
##      AIC     AICc      BIC 
## 1252.991 1253.784 1269.356 
## 
## Error measures:
##                    ME     RMSE      MAE        MPE     MAPE      MASE
## Training set 1.040088 22.81499 17.73846 -0.1296296 5.821059 0.5780878
##                    ACF1
## Training set 0.01590553
## 
## Forecasts:
##         Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 2005 Q2       413.0856 383.1779 442.9934 367.3457 458.8256
## 2005 Q3       365.8991 328.2152 403.5830 308.2666 423.5316
## 2005 Q4       398.3136 354.1598 442.4673 330.7862 465.8409
## 2006 Q1       432.6271 382.8059 482.4483 356.4322 508.8220
## 2006 Q2       412.9973 358.0678 467.9267 328.9899 497.0046
## 2006 Q3       365.8284 306.2101 425.4467 274.6501 457.0067
## 2006 Q4       398.2570 334.2801 462.2339 300.4128 496.1012
## 2007 Q1       432.5818 364.5155 500.6482 328.4833 536.6803
uk_cars_holt = ukcars %>% stlf(h = 8, etsmodel = "AAN", damped = FALSE)
summary(uk_cars_holt)
## 
## Forecast method: STL +  ETS(A,A,N)
## 
## Model Information:
## ETS(A,A,N) 
## 
## Call:
##  ets(y = na.interp(x), model = etsmodel, damped = FALSE, allow.multiplicative.trend = allow.multiplicative.trend) 
## 
##   Smoothing parameters:
##     alpha = 0.7446 
##     beta  = 1e-04 
## 
##   Initial states:
##     l = 328.0565 
##     b = 0.9695 
## 
##   sigma:  23.2798
## 
##      AIC     AICc      BIC 
## 1251.477 1252.038 1265.114 
## 
## Error measures:
##                      ME     RMSE    MAE        MPE     MAPE      MASE
## Training set -0.4024211 22.86408 17.732 -0.5948215 5.836029 0.5778774
##                    ACF1
## Training set 0.02416541
## 
## Forecasts:
##         Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 2005 Q2       414.5101 384.6758 444.3444 368.8824 460.1377
## 2005 Q3       368.3184 331.1201 405.5167 311.4285 425.2083
## 2005 Q4       401.7217 358.3919 445.0516 335.4545 467.9890
## 2006 Q1       437.0193 388.3226 485.7160 362.5442 511.4945
## 2006 Q2       418.3698 364.8404 471.8991 336.5036 500.2359
## 2006 Q3       372.1781 314.2164 430.1398 283.5334 460.8228
## 2006 Q4       405.5814 343.5021 467.6608 310.6392 500.5236
## 2007 Q1       440.8790 374.9376 506.8205 340.0303 541.7278
ukcars_ets <- ets(ukcars)
summary(ukcars_ets)
## ETS(A,N,A) 
## 
## Call:
##  ets(y = ukcars) 
## 
##   Smoothing parameters:
##     alpha = 0.6199 
##     gamma = 1e-04 
## 
##   Initial states:
##     l = 314.2568 
##     s = -1.7579 -44.9601 21.1956 25.5223
## 
##   sigma:  25.9302
## 
##      AIC     AICc      BIC 
## 1277.752 1278.819 1296.844 
## 
## Training set error measures:
##                    ME     RMSE      MAE        MPE     MAPE      MASE
## Training set 1.313884 25.23244 20.17907 -0.1570979 6.629003 0.6576259
##                    ACF1
## Training set 0.02573334

#ets() selects an ETS(A,N,A) model: additive errors, no trend, additive seasonality.

#The damped STL + ETS(A,Ad,N) method provides the best in-sample fit, though only marginally (RMSE 22.81 vs 22.86 for the undamped version and 25.23 for ETS(A,N,A)).
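#The same comparison, pulled programmatically (sketch):

c(stl_damped = accuracy(uk_cars_damped)["Training set", "RMSE"],
  stl_holt = accuracy(uk_cars_holt)["Training set", "RMSE"],
  ets_ana = accuracy(ukcars_ets)["Training set", "RMSE"])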

plot(uk_cars_damped$residuals)

#11

autoplot(visitors)

#The data shows a positive trend and seasonality.

visitors_train <- subset(visitors, end = length(visitors) - 24)
visitors_test <- subset(visitors, start = length(visitors) - 23)
visitors_holt <- hw(visitors_train, h=24, seasonal="multiplicative")
summary(visitors_holt)                    
## 
## Forecast method: Holt-Winters' multiplicative method
## 
## Model Information:
## Holt-Winters' multiplicative method 
## 
## Call:
##  hw(y = visitors_train, h = 24, seasonal = "multiplicative") 
## 
##   Smoothing parameters:
##     alpha = 0.4379 
##     beta  = 0.0164 
##     gamma = 1e-04 
## 
##   Initial states:
##     l = 90.9387 
##     b = 2.584 
##     s = 0.945 1.0553 1.0882 0.9681 1.3072 1.0711
##            1.0264 0.9095 0.938 1.0401 0.8509 0.8001
## 
##   sigma:  0.0571
## 
##      AIC     AICc      BIC 
## 2326.608 2329.699 2383.988 
## 
## Error measures:
##                      ME     RMSE      MAE        MPE     MAPE      MASE
## Training set -0.9749466 14.06539 10.35763 -0.5792169 4.223204 0.3970304
##                   ACF1
## Training set 0.1356528
## 
## Forecasts:
##          Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## May 2003       293.5512 272.0790 315.0235 260.7123 326.3902
## Jun 2003       311.3817 286.3466 336.4168 273.0939 349.6695
## Jul 2003       379.6865 346.4414 412.9316 328.8425 430.5305
## Aug 2003       341.5672 309.2321 373.9023 292.1150 391.0195
## Sep 2003       330.3820 296.7647 363.9994 278.9688 381.7953
## Oct 2003       371.9441 331.4615 412.4267 310.0313 433.8569
## Nov 2003       387.1607 342.2705 432.0509 318.5070 455.8144
## Dec 2003       471.3185 413.3054 529.3316 382.5951 560.0419
## Jan 2004       348.1741 302.8173 393.5309 278.8068 417.5414
## Feb 2004       390.3863 336.7055 444.0672 308.2885 472.4841
## Mar 2004       377.6150 322.9349 432.2951 293.9890 461.2410
## Apr 2004       337.3049 285.9782 388.6316 258.8075 415.8024
## May 2004       284.8762 239.4088 330.3437 215.3398 354.4127
## Jun 2004       302.1571 251.6621 352.6521 224.9316 379.3825
## Jul 2004       368.4105 304.0470 432.7739 269.9751 466.8459
## Aug 2004       331.3981 270.9579 391.8384 238.9627 423.8335
## Sep 2004       320.5215 259.5777 381.4653 227.3160 413.7270
## Oct 2004       360.8154 289.3783 432.2525 251.5618 470.0690
## Nov 2004       375.5478 298.2123 452.8832 257.2733 493.8222
## Dec 2004       457.1458 359.3352 554.9564 307.5574 606.7342
## Jan 2005       337.6781 262.6845 412.6717 222.9853 452.3709
## Feb 2005       378.5881 291.3958 465.7805 245.2390 511.9373
## Mar 2005       366.1740 278.7938 453.5542 232.5375 499.8105
## Apr 2005       327.0594 246.2591 407.8596 203.4861 450.6326

#Multiplicative is necessary to account for the increasing magnitude of seasonal changes.

visitors_ets <- forecast(ets(visitors_train), h = 24)
autoplot(visitors_ets)

visitors_ets_box <- forecast(ets(visitors_train, lambda = BoxCox.lambda(visitors_train),additive.only = TRUE),  h = 24)
autoplot(visitors_ets_box)

visitors_snaive = snaive(visitors_train, h = 24)
autoplot(visitors_snaive)

visitors_confusing = visitors_train %>%
  stlm(lambda = BoxCox.lambda(visitors_train), s.window = 13, robust = TRUE, method = "ets") %>%
  forecast(h = 24)
autoplot(visitors_confusing)

forecast::accuracy(visitors_ets, visitors_test)
##                      ME     RMSE      MAE        MPE      MAPE     MASE
## Training set  0.7640074 14.53480 10.57657  0.1048224  3.994788 0.405423
## Test set     72.1992664 80.23124 74.55285 15.9202832 16.822384 2.857773
##                     ACF1 Theil's U
## Training set -0.05311217        NA
## Test set      0.58716982  1.127269
forecast::accuracy(visitors_ets_box, visitors_test)
##                     ME     RMSE      MAE        MPE      MAPE      MASE
## Training set  1.001363 14.97096 10.82396  0.1609336  3.974215 0.4149057
## Test set     69.458843 78.61032 72.41589 15.1662261 16.273089 2.7758586
##                     ACF1 Theil's U
## Training set -0.02535299        NA
## Test set      0.67684148  1.086953
forecast::accuracy(visitors_snaive, visitors_test)
##                    ME     RMSE      MAE      MPE      MAPE     MASE      ACF1
## Training set 17.29363 31.15613 26.08775 7.192445 10.285961 1.000000 0.6327669
## Test set     32.87083 50.30097 42.24583 6.640781  9.962647 1.619375 0.5725430
##              Theil's U
## Training set        NA
## Test set     0.6594016
forecast::accuracy(visitors_confusing, visitors_test)
##                      ME     RMSE       MAE         MPE     MAPE      MASE
## Training set  0.5803348 13.36431  9.551391  0.08767744  3.51950 0.3661256
## Test set     76.3637263 84.24658 78.028992 16.87750474 17.51578 2.9910209
##                     ACF1 Theil's U
## Training set -0.05924203        NA
## Test set      0.64749552  1.178154

#The STL + Box-Cox + ETS combination had the best training-set accuracy (RMSE 13.36), but snaive performed best on the test set (RMSE 50.30). The snaive residuals plotted below look roughly centered, though their training ACF1 of 0.63 points to substantial remaining autocorrelation, so they are not really white noise.

plot(visitors_snaive$residuals)
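#checkresiduals() would make that verdict explicit with an ACF plot and a Ljung-Box test (sketch):

checkresiduals(visitors_snaive)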

tsCV(visitors, forecastfunction = visitors_ets)
##      Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec
## 1985                  NA  NA  NA  NA  NA  NA  NA  NA
## 1986  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1987  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1988  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1989  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1990  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1991  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1992  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1993  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1994  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1995  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1996  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1997  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1998  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1999  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2000  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2001  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2002  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2003  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2004  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2005  NA  NA  NA  NA
tsCV(visitors, forecastfunction = visitors_ets_box)
##      Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec
## 1985                  NA  NA  NA  NA  NA  NA  NA  NA
## 1986  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1987  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1988  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1989  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1990  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1991  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1992  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1993  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1994  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1995  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1996  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1997  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1998  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1999  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2000  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2001  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2002  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2003  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2004  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2005  NA  NA  NA  NA
tsCV(visitors, forecastfunction= visitors_snaive)
##      Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec
## 1985                  NA  NA  NA  NA  NA  NA  NA  NA
## 1986  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1987  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1988  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1989  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1990  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1991  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1992  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1993  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1994  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1995  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1996  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1997  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1998  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1999  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2000  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2001  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2002  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2003  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2004  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2005  NA  NA  NA  NA
tsCV(visitors, forecastfunction= visitors_confusing)    
##      Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec
## 1985                  NA  NA  NA  NA  NA  NA  NA  NA
## 1986  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1987  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1988  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1989  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1990  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1991  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1992  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1993  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1994  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1995  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1996  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1997  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1998  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 1999  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2000  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2001  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2002  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2003  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2004  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA  NA
## 2005  NA  NA  NA  NA

#These all come back NA because tsCV() expects forecastfunction to be a function of the form function(y, h) that returns a forecast object, not an already-fitted model; the fets() wrapper below (and the sketch that follows) shows the correct pattern. SO MUCH CONTENT.
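
#A minimal sketch of the intended cross-validation, using the same wrapper pattern as fets() below (fets_visitors and fsnaive_visitors are hypothetical names):

fets_visitors <- function(y, h) { forecast(ets(y), h = h) }
fsnaive_visitors <- function(y, h) { snaive(y, h = h) }
e_ets    <- tsCV(visitors, forecastfunction = fets_visitors, h = 1)
e_snaive <- tsCV(visitors, forecastfunction = fsnaive_visitors, h = 1)
mean(e_ets^2, na.rm = TRUE)     # CV MSE, ETS
mean(e_snaive^2, na.rm = TRUE)  # CV MSE, seasonal naive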

fets <- function(y, h) {
  forecast(ets(y), h = h)
}
qcement_ets = tsCV(y=qcement, forecastfunction=fets, h=4)
summary(qcement_ets)
##       h=1                 h=2                 h=3                 h=4          
##  Min.   :-0.284697   Min.   :-0.505359   Min.   :-0.467304   Min.   :-0.45008  
##  1st Qu.:-0.040635   1st Qu.:-0.043311   1st Qu.:-0.052818   1st Qu.:-0.05847  
##  Median : 0.003049   Median : 0.005511   Median : 0.017342   Median : 0.01804  
##  Mean   : 0.001308   Mean   : 0.003296   Mean   : 0.002916   Mean   : 0.00374  
##  3rd Qu.: 0.046562   3rd Qu.: 0.065395   3rd Qu.: 0.076212   3rd Qu.: 0.07748  
##  Max.   : 0.273002   Max.   : 0.307940   Max.   : 0.316248   Max.   : 0.34043  
##  NA's   :1           NA's   :2           NA's   :3           NA's   :4
autoplot(qcement_ets)

qcement_snaive = tsCV(qcement, snaive, h=4)
#(Note: this forecasts the matrix of CV errors itself, which is why ets() warns about missing values below; it isn’t needed for the MSE comparison further down.)

forecast(qcement_snaive, h=4)
## Warning in ets(object, lambda = lambda, biasadj = biasadj,
## allow.multiplicative.trend = allow.multiplicative.trend, : Missing values
## encountered. Using longest contiguous portion of time series

## Warning in ets(object, lambda = lambda, biasadj = biasadj,
## allow.multiplicative.trend = allow.multiplicative.trend, : Missing values
## encountered. Using longest contiguous portion of time series

## Warning in ets(object, lambda = lambda, biasadj = biasadj,
## allow.multiplicative.trend = allow.multiplicative.trend, : Missing values
## encountered. Using longest contiguous portion of time series

## Warning in ets(object, lambda = lambda, biasadj = biasadj,
## allow.multiplicative.trend = allow.multiplicative.trend, : Missing values
## encountered. Using longest contiguous portion of time series
## h=1
##         Point Forecast       Lo 80     Hi 80      Lo 95     Hi 95
## 2014 Q1      0.1589326  0.01885626 0.2990090 -0.0552957 0.3731610
## 2014 Q2      0.1589326 -0.01615530 0.3340206 -0.1088413 0.4267065
## 2014 Q3      0.1589326 -0.04524910 0.3631144 -0.1533364 0.4712016
## 2014 Q4      0.1589326 -0.07068569 0.3885510 -0.1922383 0.5101035
## 
## h=2
##         Point Forecast       Lo 80     Hi 80       Lo 95     Hi 95
## 2013 Q4      0.1588659  0.01811513 0.2996167 -0.05639383 0.3741257
## 2014 Q1      0.1588659 -0.01695686 0.3346887 -0.11003182 0.4277637
## 2014 Q2      0.1588659 -0.04611339 0.3638452 -0.15462288 0.4723547
## 2014 Q3      0.1588659 -0.07161051 0.3893423 -0.19361736 0.5113492
## 
## h=3
##         Point Forecast       Lo 80     Hi 80       Lo 95     Hi 95
## 2013 Q3      0.1589197  0.01770741 0.3001320 -0.05704588 0.3748853
## 2013 Q4      0.1589197 -0.01756711 0.3354066 -0.11099360 0.4288330
## 2014 Q1      0.1589197 -0.04688185 0.3647213 -0.15582662 0.4736661
## 2014 Q2      0.1589197 -0.07251271 0.3903522 -0.19502564 0.5128651
## 
## h=4
##         Point Forecast       Lo 80     Hi 80       Lo 95     Hi 95
## 2013 Q2      0.1589277  0.01710656 0.3007488 -0.05796901 0.3758244
## 2013 Q3      0.1589277 -0.01833302 0.3361884 -0.11216917 0.4300245
## 2013 Q4      0.1589277 -0.04778343 0.3656388 -0.15720969 0.4750650
## 2014 Q1      0.1589277 -0.07353223 0.3913876 -0.19658908 0.5144444
autoplot(qcement_snaive)+autolayer(forecast(qcement_snaive, h=4))
## Warning in ets(object, lambda = lambda, biasadj = biasadj,
## allow.multiplicative.trend = allow.multiplicative.trend, : Missing values
## encountered. Using longest contiguous portion of time series

## Warning in ets(object, lambda = lambda, biasadj = biasadj,
## allow.multiplicative.trend = allow.multiplicative.trend, : Missing values
## encountered. Using longest contiguous portion of time series

## Warning in ets(object, lambda = lambda, biasadj = biasadj,
## allow.multiplicative.trend = allow.multiplicative.trend, : Missing values
## encountered. Using longest contiguous portion of time series

## Warning in ets(object, lambda = lambda, biasadj = biasadj,
## allow.multiplicative.trend = allow.multiplicative.trend, : Missing values
## encountered. Using longest contiguous portion of time series

print("MSE ets")
## [1] "MSE ets"
mean(qcement_ets^2,na.rm=T)
## [1] 0.01250954
print("MSE snaive")
## [1] "MSE snaive"
mean(qcement_snaive^2,na.rm=T)
## [1] 0.01792147

#The ETS model had the lower CV MSE. This makes sense, as it is not naive and can adapt to the data. The missing values appear because tsCV() cannot score forecasts that run past the end of the series: the h=4 column loses the last four observations, h=3 the last three, and so on.
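
#tsCV() returns one error column per horizon, so the comparison can also be made horizon by horizon rather than pooled (a sketch):

colMeans(qcement_ets^2, na.rm = TRUE)     # CV MSE at h = 1..4, ETS
colMeans(qcement_snaive^2, na.rm = TRUE)  # CV MSE at h = 1..4, seasonal naive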

#13

bricksq.BC <- BoxCox(bricksq, BoxCox.lambda(bricksq))
ausbeer.train <- window(ausbeer, end = c(2007,2))
ausbeer.test  <- window(ausbeer, start = c(2007,3))
bricksq.BC.train    <- window(bricksq.BC, end = c(1991,1))
bricksq.BC.test    <- window(bricksq.BC, start = c(1991,2))
dole.train <- window(dole, end=c(1989,7))
dole.test <- window(dole, start=c(1989,8))
a10.train <- window(a10, end = c(2005,6))
a10.test <- window(a10, start = c(2005,7))
h02.train <- window(h02, end = c(2005,7))
h02.test <- window(h02, start = c(2005,8))
usmelec.train <- window(usmelec, end = c(2010,6))
usmelec.test <- window(usmelec, start = c(2010,7))

#Ausbeer

print("ets")
## [1] "ets"
forecast::accuracy(forecast(ets(ausbeer.train), h=12),ausbeer.test)
##                      ME      RMSE       MAE          MPE     MAPE      MASE
## Training set -0.3466095 15.781973 11.979955 -0.064073435 2.864311 0.7567076
## Test set      0.1272571  9.620828  8.919347  0.009981598 2.132836 0.5633859
##                    ACF1 Theil's U
## Training set -0.1942276        NA
## Test set      0.3763918 0.1783972
print("snaive")
## [1] "snaive"
forecast::accuracy(forecast(snaive(ausbeer.train, h=12), h=12),ausbeer.test)
##                     ME     RMSE      MAE        MPE     MAPE      MASE
## Training set  3.336634 19.67005 15.83168  0.9044009 3.771370 1.0000000
## Test set     -2.916667 10.80509  9.75000 -0.6505927 2.338012 0.6158537
##                      ACF1 Theil's U
## Training set 0.0009690785        NA
## Test set     0.3254581810 0.1981463
print("stlf")
## [1] "stlf"
forecast::accuracy(forecast(stlf(ausbeer.train, h=12), h=12),ausbeer.test)
##                       ME      RMSE       MAE         MPE     MAPE      MASE
## Training set -0.08449615 13.640828 10.495557 -0.02319029 2.523657 0.6629464
## Test set     -0.74491615  9.596294  9.014064 -0.20867707 2.159142 0.5693686
##                    ACF1 Theil's U
## Training set -0.1623773        NA
## Test set      0.3154543 0.1712144

#Best= stlf

#Bricksq

print("ets")
## [1] "ets"
forecast::accuracy(forecast(ets(bricksq.BC.train), h=12),bricksq.BC.test)
##                      ME      RMSE       MAE       MPE     MAPE      MASE
## Training set 0.01996121 0.2419629 0.1755865 0.1409763 1.247006 0.4329129
## Test set     0.20641501 0.2854117 0.2458899 1.4176042 1.692439 0.6062480
##                   ACF1 Theil's U
## Training set 0.1434416        NA
## Test set     0.3798615  0.793706
print("snaive")
## [1] "snaive"
forecast::accuracy(forecast(snaive(bricksq.BC.train, h=12), h=12),bricksq.BC.test)
##                      ME      RMSE       MAE        MPE     MAPE     MASE
## Training set  0.1051226 0.5448411 0.4055930  0.7368813 2.873281 1.000000
## Test set     -0.3353677 0.5781175 0.4975619 -2.2995568 3.439080 1.226752
##                     ACF1 Theil's U
## Training set  0.77776982        NA
## Test set     -0.05416358  1.488247
print("stlf")
## [1] "stlf"
forecast::accuracy(forecast(stlf(bricksq.BC.train, h=12), h=12),bricksq.BC.test)
##                     ME      RMSE       MAE      MPE     MAPE      MASE
## Training set 0.0208919 0.2258669 0.1671388 0.150274 1.185157 0.4120850
## Test set     0.1568390 0.2683405 0.2296609 1.077773 1.585394 0.5662348
##                   ACF1 Theil's U
## Training set 0.2125626        NA
## Test set     0.3479158 0.7546849

#Best= stlf

#dole

print("ets")
## [1] "ets"
forecast::accuracy(forecast(ets(dole.train), h=12),dole.test)
##                       ME     RMSE       MAE       MPE    MAPE      MASE
## Training set    98.00253 16130.58  9427.497 0.5518769 6.20867 0.2995409
## Test set     27788.36057 33298.90 27788.361 6.9987596 6.99876 0.8829225
##                   ACF1 Theil's U
## Training set 0.5139536        NA
## Test set     0.4229686  2.661928
print("snaive")
## [1] "snaive"
forecast::accuracy(forecast(snaive(dole.train, h=12), h=12),dole.test)
##                     ME     RMSE      MAE       MPE     MAPE     MASE      ACF1
## Training set  12606.58 56384.06 31473.16  3.350334 27.73651 1.000000 0.9781058
## Test set     -28527.50 53010.22 47640.67 -7.974568 12.40354 1.513692 0.7245915
##              Theil's U
## Training set        NA
## Test set      4.091062
print("stlf")
## [1] "stlf"
forecast::accuracy(forecast(stlf(dole.train, h=12), h=12),dole.test)
##                      ME      RMSE       MAE       MPE     MAPE      MASE
## Training set   256.3644  6248.781  3836.768 0.5425694 4.804379 0.1219060
## Test set     12251.8901 29920.856 20396.704 2.8597442 4.946181 0.6480666
##                    ACF1 Theil's U
## Training set 0.01337877        NA
## Test set     0.64087706  2.371564

#Best= stlf

#a10

print("ets")
## [1] "ets"
forecast::accuracy(forecast(ets(a10.train), h=12),a10.test)
##                      ME     RMSE       MAE       MPE     MAPE      MASE
## Training set 0.04378049 0.488989 0.3542285 0.2011915 3.993267 0.3611299
## Test set     0.11379391 1.023291 0.8808195 0.2374252 5.155528 0.8979803
##                     ACF1 Theil's U
## Training set -0.05238173        NA
## Test set     -0.39464995 0.3207898
print("snaive")
## [1] "snaive"
forecast::accuracy(forecast(snaive(a10.train, h=12), h=12),a10.test)
##                     ME     RMSE       MAE       MPE      MAPE     MASE
## Training set 0.9577207 1.177528 0.9808895 10.862831 11.157672 1.000000
## Test set     1.4016923 1.704054 1.4393090  7.698516  7.962797 1.467351
##                    ACF1 Theil's U
## Training set  0.3779746        NA
## Test set     -0.7220504 0.5840003
print("stlf")
## [1] "stlf"
forecast::accuracy(forecast(stlf(a10.train, h=12), h=12),a10.test)
##                     ME      RMSE       MAE       MPE     MAPE      MASE
## Training set 0.0554619 0.4582707 0.3448964 0.3743525 4.015951 0.3516159
## Test set     0.3270847 1.3596831 1.1499080 0.7849906 6.684667 1.1723114
##                     ACF1 Theil's U
## Training set  0.02360657        NA
## Test set     -0.16012041 0.4000005

#Best=ets

#H02

print("ets")
## [1] "ets"
forecast::accuracy(forecast(ets(h02.train), h=12),h02.test)
##                        ME       RMSE        MAE        MPE     MAPE      MASE
## Training set 0.0004513622 0.04579930 0.03426080 -0.2209876 4.560572 0.5745005
## Test set     0.0232785622 0.06402092 0.05309284  1.7574790 5.577604 0.8902845
##                     ACF1 Theil's U
## Training set  0.05057522        NA
## Test set     -0.38030773 0.3337125
print("snaive")
## [1] "snaive"
forecast::accuracy(forecast(snaive(h02.train, h=12), h=12),h02.test)
##                       ME       RMSE        MAE       MPE     MAPE     MASE
## Training set  0.03774904 0.07171709 0.05963581  5.094248 8.197262 1.000000
## Test set     -0.01621003 0.07156409 0.05756597 -1.347556 6.137698 0.965292
##                    ACF1 Theil's U
## Training set 0.40080565        NA
## Test set     0.01845296 0.4285937
print("stlf")
## [1] "stlf"
forecast::accuracy(forecast(stlf(h02.train, h=12), h=12),h02.test)
##                        ME       RMSE        MAE        MPE     MAPE      MASE
## Training set -0.000830773 0.04053338 0.03033702 -0.2685748 4.274993 0.5087047
## Test set      0.013985124 0.06423336 0.05508629  0.3780307 5.956852 0.9237115
##                     ACF1 Theil's U
## Training set -0.06958533        NA
## Test set     -0.10704436 0.3342446

#Best=ets

#usmelec

print("ets")
## [1] "ets"
forecast::accuracy(forecast(ets(usmelec.train), h=12),usmelec.test)
##                       ME     RMSE      MAE        MPE     MAPE      MASE
## Training set -0.07834851 7.447167 5.601963 -0.1168709 2.163495 0.6225637
## Test set     -1.17486480 7.974503 6.633269 -0.5181480 1.933084 0.7371760
##                   ACF1 Theil's U
## Training set 0.2719249        NA
## Test set     0.2142856 0.2257995
print("snaive")
## [1] "snaive"
forecast::accuracy(forecast(snaive(usmelec.train, h=12), h=12),usmelec.test)
##                    ME     RMSE       MAE      MPE     MAPE     MASE      ACF1
## Training set 4.921564 11.58553  8.998217 2.000667 3.511648 1.000000 0.4841860
## Test set     9.155417 16.08331 12.285917 2.473758 3.402411 1.365372 0.4247762
##              Theil's U
## Training set        NA
## Test set      0.332999
print("stlf")
## [1] "stlf"
forecast::accuracy(forecast(stlf(usmelec.train, h=12), h=12),usmelec.test)
##                      ME     RMSE      MAE         MPE     MAPE      MASE
## Training set -0.0657726 6.192160 4.682101 -0.06613967 1.826421 0.5203366
## Test set     -3.8976853 9.228437 7.981593 -1.34866348 2.401607 0.8870194
##                   ACF1 Theil's U
## Training set 0.1106876        NA
## Test set     0.2390297 0.2744262

#Best = ets on the test set (RMSE 7.97 vs 9.23 for stlf); stlf only wins on the training set.
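
#The same comparison could be wrapped in a helper to cut the repetition (a sketch; compare3 is a hypothetical helper using the train/test splits defined above):

compare3 <- function(y_train, y_test, h = 12) {
  c(ets    = forecast::accuracy(forecast(ets(y_train), h = h), y_test)["Test set", "RMSE"],
    snaive = forecast::accuracy(snaive(y_train, h = h), y_test)["Test set", "RMSE"],
    stlf   = forecast::accuracy(stlf(y_train, h = h), y_test)["Test set", "RMSE"])
}
compare3(usmelec.train, usmelec.test)  # test-set RMSE for each method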

#14

ets(bicoal)
## ETS(M,N,N) 
## 
## Call:
##  ets(y = bicoal) 
## 
##   Smoothing parameters:
##     alpha = 0.8205 
## 
##   Initial states:
##     l = 542.665 
## 
##   sigma:  0.1262
## 
##      AIC     AICc      BIC 
## 595.2499 595.7832 600.9253
ets(chicken)
## ETS(M,N,N) 
## 
## Call:
##  ets(y = chicken) 
## 
##   Smoothing parameters:
##     alpha = 0.98 
## 
##   Initial states:
##     l = 159.8322 
## 
##   sigma:  0.1691
## 
##      AIC     AICc      BIC 
## 635.2382 635.6018 641.9836
ets(dole)
## ETS(M,Ad,M) 
## 
## Call:
##  ets(y = dole) 
## 
##   Smoothing parameters:
##     alpha = 0.697 
##     beta  = 0.124 
##     gamma = 0.303 
##     phi   = 0.902 
## 
##   Initial states:
##     l = 2708.6621 
##     b = 836.017 
##     s = 1.0404 0.8893 0.9103 1.0301 1.0576 1.0584
##            0.9801 0.9632 1.021 0.9838 1.0145 1.0514
## 
##   sigma:  0.0935
## 
##      AIC     AICc      BIC 
## 10602.67 10604.30 10676.19
ets(usdeaths)
## ETS(A,N,A) 
## 
## Call:
##  ets(y = usdeaths) 
## 
##   Smoothing parameters:
##     alpha = 0.5972 
##     gamma = 0.0019 
## 
##   Initial states:
##     l = 9195.6403 
##     s = -62.6129 -270.0351 263.3823 -89.4907 1005.529 1662.647
##            795.2585 333.326 -551.161 -737.5102 -1552.872 -796.4611
## 
##   sigma:  294.4663
## 
##      AIC     AICc      BIC 
## 1141.016 1149.587 1175.166
ets(lynx)
## ETS(M,N,N) 
## 
## Call:
##  ets(y = lynx) 
## 
##   Smoothing parameters:
##     alpha = 0.9999 
## 
##   Initial states:
##     l = 2372.8047 
## 
##   sigma:  0.9594
## 
##      AIC     AICc      BIC 
## 2058.138 2058.356 2066.346
ets(ibmclose)
## ETS(A,N,N) 
## 
## Call:
##  ets(y = ibmclose) 
## 
##   Smoothing parameters:
##     alpha = 0.9999 
## 
##   Initial states:
##     l = 459.9339 
## 
##   sigma:  7.2637
## 
##      AIC     AICc      BIC 
## 3648.450 3648.515 3660.182
ets(eggs)
## ETS(M,N,N) 
## 
## Call:
##  ets(y = eggs) 
## 
##   Smoothing parameters:
##     alpha = 0.8198 
## 
##   Initial states:
##     l = 278.8889 
## 
##   sigma:  0.1355
## 
##      AIC     AICc      BIC 
## 1043.286 1043.553 1050.916

#ets() always returns a model, but when a series has no trend or seasonal structure to exploit it falls back to ETS(*,N,N), i.e. simple exponential smoothing. ibmclose, for example, behaves like a random walk, so ets() selects ETS(A,N,N) with alpha = 0.9999, which is essentially the naive forecast.
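
#A quick check of that point (a sketch): with alpha near 1, simple exponential smoothing puts almost all its weight on the last observation, so the ETS point forecasts should essentially match the naive method.

forecast(ets(ibmclose, model="ANN"), h=3)$mean  # flat, near the last closing price
naive(ibmclose, h=3)$mean                       # naive: repeat the last observation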

#15

#Note: forecast() has no model argument, so model="MAM" is silently ignored here and this is the auto-selected ETS(A,N,A) forecast; the specification has to be passed to ets() itself (see the sketch below).

forecast(ets(usdeaths), h=1, model="MAM")
##          Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## Jan 1979       8345.599 7968.225 8722.973 7768.455 8922.742
hw2 = hw(usdeaths, seasonal="multiplicative",h=1)
hw2$mean
##          Jan
## 1979 8217.64
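
#A sketch of the intended comparison: hw(seasonal="multiplicative") fits an ETS(M,A,M), so passing that spec to ets() directly should reproduce its one-step forecast up to optimization differences. The two point forecasts above differ (8345.6 vs 8217.6) precisely because the first call fell back to the additive model.

forecast(ets(usdeaths, model="MAM"), h=1)$mean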

#16

generic_word = ets(ibmclose, model="ANN")
generic_word
## ETS(A,N,N) 
## 
## Call:
##  ets(y = ibmclose, model = "ANN") 
## 
##   Smoothing parameters:
##     alpha = 0.9999 
## 
##   Initial states:
##     l = 459.9339 
## 
##   sigma:  7.2637
## 
##      AIC     AICc      BIC 
## 3648.450 3648.515 3660.182
generic_forecast= forecast(generic_word,1)
generic_forecast
##     Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 370       356.9995 347.6907 366.3083 342.7629 371.2361
.9999^2*(1+.9999^2*(1))
## [1] 1.9994

#The expression above isn’t quite right: for ETS(A,N,N) the h-step forecast variance is sigma^2*(1 + alpha^2*(h - 1)), so the residual variance sigma^2 is needed, and at h = 1 the interval is just the point forecast +/- 1.96*sigma, as checked below.
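
#Checking that numerically with sigma = 7.2637 from the fit above (a sketch):

356.9995 + c(-1, 1)*1.96*7.2637  # ~ [342.76, 371.24], matching forecast()'s 95% interval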

#17, using l, alpha, h and sigma from above. The hand calculation below lands near the 95% interval only by coincidence (its multiplier sigma*(1 + alpha^2) is roughly 2*sigma, close to 1.96*sigma); the corrected multiplier appears in the sketch after the output.

356.995+(7.2637*(1+(.999^2)*(1)))
## [1] 371.5079
356.995-(7.2637*(1+(.999^2)*(1)))
## [1] 342.4821
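
#A corrected sketch for general h, reusing the fitted values above (ets_ann_interval is a hypothetical helper, not from the forecast package):

ets_ann_interval <- function(fc, sigma, alpha, h, z = 1.96) {
  se_h <- sigma*sqrt(1 + alpha^2*(h - 1))  # h-step forecast sd for ETS(A,N,N)
  c(lower = fc - z*se_h, upper = fc + z*se_h)
}
ets_ann_interval(356.9995, 7.2637, .9999, h = 1)  # matches the interval from forecast()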

##Chapter 8

#1

#a. For white noise we expect every autocorrelation to be close to 0, and in each figure they are: only the occasional spike reaches the critical bounds, consistent with the roughly 5% of spikes expected to do so by chance.

#b. The autocorrelations differ between figures because each plots a different random series; white noise only requires that the sample autocorrelations stay (mostly) within the bounds, not that all ACF plots look identical. The critical values sit at different distances from zero because the bounds are +/-1.96/sqrt(T) and the series have different lengths T, so longer series get narrower bounds.
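
#A quick numeric sketch of those bounds, assuming the three panels use series of length 36, 360 and 1000 as in the textbook figure:

1.96/sqrt(c(36, 360, 1000))  # ~0.33, 0.10, 0.06: the bounds narrow as T grows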

#2

autoplot(ibmclose)

checkresiduals(ibmclose)
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

pacf(ibmclose)

#The daily closing prices wander with long runs up and down, the ACF decays very slowly, and the PACF has a single large spike at lag 1: all signs of non-stationary data. Differencing the series once should make it stationary and suitable for modelling.

#3 #a

usnetelec  %>% diff() %>% ur.kpss() %>% summary()
## 
## ####################### 
## # KPSS Unit Root Test # 
## ####################### 
## 
## Test is of type: mu with 3 lags. 
## 
## Value of test-statistic is: 0.1585 
## 
## Critical value for a significance level of: 
##                 10pct  5pct 2.5pct  1pct
## critical values 0.347 0.463  0.574 0.739

#b

usgdp %>% diff() %>% ur.kpss() %>% summary()
## 
## ####################### 
## # KPSS Unit Root Test # 
## ####################### 
## 
## Test is of type: mu with 4 lags. 
## 
## Value of test-statistic is: 1.7909 
## 
## Critical value for a significance level of: 
##                 10pct  5pct 2.5pct  1pct
## critical values 0.347 0.463  0.574 0.739
usgdp %>% diff() %>% diff()%>% ur.kpss() %>% summary()
## 
## ####################### 
## # KPSS Unit Root Test # 
## ####################### 
## 
## Test is of type: mu with 4 lags. 
## 
## Value of test-statistic is: 0.021 
## 
## Critical value for a significance level of: 
##                 10pct  5pct 2.5pct  1pct
## critical values 0.347 0.463  0.574 0.739

#c

mcopper %>% diff() %>% ur.kpss() %>% summary()
## 
## ####################### 
## # KPSS Unit Root Test # 
## ####################### 
## 
## Test is of type: mu with 6 lags. 
## 
## Value of test-statistic is: 0.1843 
## 
## Critical value for a significance level of: 
##                 10pct  5pct 2.5pct  1pct
## critical values 0.347 0.463  0.574 0.739
#d

enplanements %>% diff() %>% ur.kpss() %>% summary()
## 
## ####################### 
## # KPSS Unit Root Test # 
## ####################### 
## 
## Test is of type: mu with 5 lags. 
## 
## Value of test-statistic is: 0.0086 
## 
## Critical value for a significance level of: 
##                 10pct  5pct 2.5pct  1pct
## critical values 0.347 0.463  0.574 0.739

#e.

visitors %>% diff() %>% ur.kpss() %>% summary()
## 
## ####################### 
## # KPSS Unit Root Test # 
## ####################### 
## 
## Test is of type: mu with 4 lags. 
## 
## Value of test-statistic is: 0.0191 
## 
## Critical value for a significance level of: 
##                 10pct  5pct 2.5pct  1pct
## critical values 0.347 0.463  0.574 0.739

#In each case a single first difference (a second difference for usgdp) brings the KPSS statistic below the 10% critical value, so the differenced series can be treated as stationary.

#4 #In backshift notation, d first differences of yt are written ((1-B)^d)(yt); adding a seasonal difference of period m gives (1-B)(1-B^m)(yt).

#5

train %>% diff() %>% ur.kpss() %>% summary()
## 
## ####################### 
## # KPSS Unit Root Test # 
## ####################### 
## 
## Test is of type: mu with 5 lags. 
## 
## Value of test-statistic is: 0.0133 
## 
## Critical value for a significance level of: 
##                 10pct  5pct 2.5pct  1pct
## critical values 0.347 0.463  0.574 0.739

#6.

y <- ts(numeric(100))
e <- rnorm(100)

# Simulate an AR(1): y_t = phi*y_{t-1} + e_t
ct_ar = function(phi, y, e){
  for(i in 2:100){
    y[i] <- phi*y[i-1] + e[i]}
  return(y)}
ex_1= ct_ar(.6,y,e)
autoplot(ex_1)

ex_1= ct_ar(1,y,e)
autoplot(ex_1)

#Higher phi makes the series smoother and lets it wander further from its mean; at phi = 1 the process becomes a random walk and is no longer stationary.
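
#A rough numeric check, reusing ct_ar and the same e (a sketch): a stationary AR(1) has variance sigma^2/(1 - phi^2), so the sample variance should grow as phi approaches 1.

sapply(c(.2, .6, .9), function(p) var(ct_ar(p, y, e)))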

#c #Similar in structure, but an MA(1) depends on the lagged errors rather than the lagged values of y.

y <- ts(numeric(100))
e <- rnorm(100)

# Simulate an MA(1): y_t = e_t + theta*e_{t-1} (depends on past errors, not past y)
ct_ma = function(theta, y, e){
  for(i in 2:100){
    y[i] <- e[i] + theta*e[i-1]}
  return(y)}
autoplot(ct_ma(.6,y,e))

autoplot(ct_ma(1,y,e))

#e

# Simulate an ARMA(1,1): y_t = phi*y_{t-1} + theta*e_{t-1} + e_t
ct_arma = function(num1, phi, theta){
  y <- ts(numeric(num1))
  e <- rnorm(num1)
  for(i in 2:num1){
    y[i] <- phi*y[i-1] + theta*e[i-1] + e[i]}
  return(y)}
autoplot(ct_arma(100, .6, .6))

#f

# Simulate an AR(2): y_t = -0.8*y_{t-1} - 0.3*y_{t-2} + e_t
y <- ts(numeric(100))
e <- rnorm(100)
for (i in 3:100){
  y[i] <- (-.8)*y[i-1] + (-.3)*y[i-2] + e[i]}
autoplot(y)

#With a negative first coefficient, the AR(2) series flips sign from one period to the next, so it looks far choppier than the ARMA(1,1) series above.

#7

autoplot(wmurders)

wmurders %>% ur.kpss() %>% summary()
## 
## ####################### 
## # KPSS Unit Root Test # 
## ####################### 
## 
## Test is of type: mu with 3 lags. 
## 
## Value of test-statistic is: 0.6331 
## 
## Critical value for a significance level of: 
##                 10pct  5pct 2.5pct  1pct
## critical values 0.347 0.463  0.574 0.739

#The test statistic (0.6331) exceeds the 5% critical value (0.463), so the series is non-stationary and should be differenced; auto.arima below settles on d = 2.
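
#A quick cross-check (a sketch, using ndiffs() from the forecast package already loaded):

ndiffs(wmurders)                                 # suggested number of first differences
wmurders %>% diff() %>% ur.kpss() %>% summary()  # re-test after one difference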

checkresiduals(wmurders)
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

#a. Given the KPSS result above, the series needs differencing before the ACF/PACF can suggest ARMA orders; as a baseline I’ll fit an ARIMA(0,0,0) (a mean-only model) and compare it with auto.arima below.

#b. No, a constant should not be included: combined with differencing, a constant puts a polynomial trend into the long-term forecasts (a quadratic when d = 2), which isn’t plausible for this series.

#c. In backshift notation an ARIMA(1,2,1) is (1 - phi1*B)((1-B)^2)(yt) = (1 + theta1*B)(et)

#d.

wmurder_arima = arima(wmurders, order=c(0,0,0))
wmurder_arima
## 
## Call:
## arima(x = wmurders, order = c(0, 0, 0))
## 
## Coefficients:
##       intercept
##          3.3927
## s.e.     0.1000
## 
## sigma^2 estimated as 0.5503:  log likelihood = -61.62,  aic = 127.23
wmurder_arima_forecast = forecast(wmurder_arima, h=3)
autoplot(wmurder_arima_forecast)

wmurder_autoarima= auto.arima(wmurders)
wmurder_autoarima
## Series: wmurders 
## ARIMA(1,2,1) 
## 
## Coefficients:
##           ar1      ma1
##       -0.2434  -0.8261
## s.e.   0.1553   0.1143
## 
## sigma^2 estimated as 0.04632:  log likelihood=6.44
## AIC=-6.88   AICc=-6.39   BIC=-0.97
wmurder_autoarima_forecast = forecast(wmurder_autoarima,h=3)
autoplot(wmurder_autoarima_forecast)

#auto.arima did a much better job, with a far better graphical fit. The chosen ARIMA(1,2,1) is twice-differenced with no constant, and has a first-order autoregressive part and a first-order moving average part. (The AICs aren’t directly comparable because the two models difference the data differently.)

#8.

austa_auto = auto.arima(austa)
austa_auto
## Series: austa 
## ARIMA(0,1,1) with drift 
## 
## Coefficients:
##          ma1   drift
##       0.3006  0.1735
## s.e.  0.1647  0.0390
## 
## sigma^2 estimated as 0.03376:  log likelihood=10.62
## AIC=-15.24   AICc=-14.46   BIC=-10.57
austa_auto %>% checkresiduals()

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(0,1,1) with drift
## Q* = 2.297, df = 5, p-value = 0.8067
## 
## Model df: 2.   Total lags used: 7
austa_auto %>% forecast(., h=10) %>% autoplot()

arima(austa, order=c(0,1,1)) %>% forecast(h=10) %>% autoplot()

arima(austa, order=c(0,1,0)) %>% forecast(h=10) %>% autoplot()

#(2,1,3) doesn’t run: arima() errors out at this order, likely a convergence problem; see the guarded sketch below.
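
#A guarded sketch so the document still knits when an order fails to converge: wrap the fit and report the error instead of stopping.

tryCatch(
  arima(austa, order=c(2,1,3)) %>% forecast(h=10) %>% autoplot(),
  error = function(e) message("ARIMA(2,1,3) failed: ", conditionMessage(e))
)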

arima(austa, order=c(2,1,0)) %>% forecast(h=10) %>% autoplot()

arima(austa, order=c(0,0,1)) %>% forecast(h=10) %>% autoplot()

arima(austa, order=c(0,0,0)) %>% forecast(h=10) %>% autoplot()

arima(austa, order=c(0,2,1)) %>% forecast(h=10) %>% autoplot()

#9.

autoplot(usgdp)

usgdp%>% ur.kpss() %>% summary()
## 
## ####################### 
## # KPSS Unit Root Test # 
## ####################### 
## 
## Test is of type: mu with 4 lags. 
## 
## Value of test-statistic is: 4.6556 
## 
## Critical value for a significance level of: 
##                 10pct  5pct 2.5pct  1pct
## critical values 0.347 0.463  0.574 0.739
usgdp_lambda = usgdp %>% BoxCox.lambda()  # scalar Box-Cox lambda, not a transformed series
summary(auto.arima(usgdp, lambda=usgdp_lambda))
## Series: usgdp 
## ARIMA(2,1,0) with drift 
## Box Cox transformation: lambda= 0.366352 
## 
## Coefficients:
##          ar1     ar2   drift
##       0.2795  0.1208  0.1829
## s.e.  0.0647  0.0648  0.0202
## 
## sigma^2 estimated as 0.03518:  log likelihood=61.56
## AIC=-115.11   AICc=-114.94   BIC=-101.26
## 
## Training set error measures:
##                    ME    RMSE      MAE         MPE      MAPE      MASE
## Training set 1.195275 39.2224 29.29521 -0.01363259 0.6863491 0.1655687
##                     ACF1
## Training set -0.03824844
summary(usgdp %>% arima(., order=c(1,1,1)))
## 
## Call:
## arima(x = ., order = c(1, 1, 1))
## 
## Coefficients:
##          ar1      ma1
##       0.9624  -0.6426
## s.e.  0.0256   0.0871
## 
## sigma^2 estimated as 1717:  log likelihood = -1214.39,  aic = 2434.79
## 
## Training set error measures:
## Warning in trainingaccuracy(object, test, d, D): test elements must be within
## sample
##               ME RMSE MAE MPE MAPE
## Training set NaN  NaN NaN NaN  NaN
summary(usgdp %>% arima(., order=c(0,1,0)))
## 
## Call:
## arima(x = ., order = c(0, 1, 0))
## 
## 
## sigma^2 estimated as 3832:  log likelihood = -1308.5,  aic = 2619.01
## 
## Training set error measures:
## Warning in trainingaccuracy(object, test, d, D): test elements must be within
## sample
##               ME RMSE MAE MPE MAPE
## Training set NaN  NaN NaN NaN  NaN

#The best model is from auto.arima (note its AIC isn’t directly comparable to the others because of the Box-Cox transformation).

checkresiduals(auto.arima(usgdp, lambda=usgdp_lambda))

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(2,1,0) with drift
## Q* = 6.5772, df = 5, p-value = 0.254
## 
## Model df: 3.   Total lags used: 8
autoplot(forecast(auto.arima(usgdp, lambda=usgdp_lambda), h=10))

#Looks reasonable and dreamy.

autoplot(forecast(ets(usgdp), h=10))

forecast::accuracy(ets(usgdp))
##                   ME     RMSE      MAE        MPE      MAPE     MASE       ACF1
## Training set 1.30185 41.53448 31.85324 0.02319607 0.7543931 0.180026 0.07596492
forecast::accuracy(auto.arima(usgdp, lambda=usgdp_lambda))
##                    ME    RMSE      MAE         MPE      MAPE      MASE
## Training set 1.195275 39.2224 29.29521 -0.01363259 0.6863491 0.1655687
##                     ACF1
## Training set -0.03824844

#The auto.arima model is better, with lower errors on every measure (ME, RMSE, MAE, MAPE, MASE).

#10

tsdisplay(austourists)

#This plot shows seasonal and trend components. The ACF shows significant autocorrelations that appear seasonal in nature, and the PACF shows significant correlations at the first four lags followed by insignificant correlations at higher lags.

austourists %>% diff()  %>% ur.kpss()
## 
## ####################################### 
## # KPSS Unit Root / Cointegration Test # 
## ####################################### 
## 
## The value of the test statistic is: 0.4038
austourists %>% diff() %>% tsdisplay()

#The differencing removed the trend component, but left the seasonal component. This is evidenced by the plot as well as the ACF, which shows autocorrelations at seasonal lags. I think a model with combined differencing, autoregression and moving average may work

austourists %>% arima(., order=c(1,1,1)) 
## 
## Call:
## arima(x = ., order = c(1, 1, 1))
## 
## Coefficients:
##           ar1      ma1
##       -0.2512  -0.7518
## s.e.   0.1268   0.0605
## 
## sigma^2 estimated as 66.29:  log likelihood = -236.19,  aic = 478.38
austourists %>% auto.arima()
## Series: . 
## ARIMA(1,0,0)(1,1,0)[4] with drift 
## 
## Coefficients:
##          ar1     sar1   drift
##       0.4705  -0.5305  0.5489
## s.e.  0.1154   0.1122  0.0864
## 
## sigma^2 estimated as 5.15:  log likelihood=-142.48
## AIC=292.97   AICc=293.65   BIC=301.6

#auto.arima dropped the non-seasonal MA term, added seasonal differencing, a seasonal AR term and drift, and reached a much better fit. In backshift notation the chosen ARIMA(1,0,0)(1,1,0)[4] with drift is (1 - phi1*B)(1 - Phi1*B^4)(1 - B^4)(yt) = c + et.

#11

plot(ma(usmelec, order=12))

#There is a positive trend, and the 12-term moving average appears to have smoothed away most of the seasonal component.

plot(usmelec)

#Yup, it did: the raw series shows the seasonality that the moving average removed.

usmelec_lambda = BoxCox.lambda(usmelec)
autoplot(BoxCox(usmelec, lambda=usmelec_lambda))

#The Box-Cox transformation stabilized the magnitude of the seasonal variation across the series.

usmelec %>% ur.kpss()
## 
## ####################################### 
## # KPSS Unit Root / Cointegration Test # 
## ####################################### 
## 
## The value of the test statistic is: 8.0041
usmelec %>%  diff()  %>% ur.kpss()
## 
## ####################################### 
## # KPSS Unit Root / Cointegration Test # 
## ####################################### 
## 
## The value of the test statistic is: 0.0089

#A single first difference brings the KPSS statistic from 8.0 down to 0.0089, so one difference is enough (auto.arima below opts for a seasonal difference instead).
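
#A cross-check against the forecast package’s own heuristics (a sketch):

nsdiffs(usmelec)                 # suggested seasonal differences
ndiffs(diff(usmelec, lag = 12))  # first differences still needed after a seasonal one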

usmelec_aa = auto.arima(usmelec)
usmelec_aa
## Series: usmelec 
## ARIMA(1,0,2)(0,1,1)[12] with drift 
## 
## Coefficients:
##          ar1      ma1      ma2     sma1   drift
##       0.9717  -0.4374  -0.2774  -0.7061  0.3834
## s.e.  0.0163   0.0483   0.0493   0.0310  0.0868
## 
## sigma^2 estimated as 57.67:  log likelihood=-1635.13
## AIC=3282.26   AICc=3282.44   BIC=3307.22
checkresiduals(usmelec_aa)

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(1,0,2)(0,1,1)[12] with drift
## Q* = 42.725, df = 19, p-value = 0.001413
## 
## Model df: 5.   Total lags used: 24

#The residuals look roughly normal, but the Ljung-Box test rejects white noise (p = 0.0014), so some autocorrelation remains.

autoplot(forecast(usmelec_aa, h=12*15),PI=TRUE)

plot(forecast(usmelec_aa, h=12*15))

#The prediction intervals keep widening over time. I’d call roughly three years the breaking point: beyond that the model is increasingly building forecasts on top of its own forecasts for the trend, so I wouldn’t trust the later values much.

#12

autoplot(mcopper)

mcopper_lambda = BoxCox.lambda(mcopper)
autoplot(BoxCox(mcopper,mcopper_lambda)) 

#I guess it helps a little bit.

auto.arima(mcopper)
## Series: mcopper 
## ARIMA(0,1,1) 
## 
## Coefficients:
##          ma1
##       0.2900
## s.e.  0.0419
## 
## sigma^2 estimated as 6026:  log likelihood=-3248.53
## AIC=6501.07   AICc=6501.09   BIC=6509.73
auto.arima(mcopper, lambda=mcopper_lambda)
## Series: mcopper 
## ARIMA(0,1,1) 
## Box Cox transformation: lambda= 0.1919047 
## 
## Coefficients:
##          ma1
##       0.3720
## s.e.  0.0388
## 
## sigma^2 estimated as 0.04997:  log likelihood=45.05
## AIC=-86.1   AICc=-86.08   BIC=-77.43

#Wow, it seems to have helped a lot, although the AICs aren’t directly comparable: the Box-Cox model’s likelihood is computed on the transformed scale.
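
#Since those AICs aren’t comparable, a fairer check is in-sample accuracy on the original scale (a sketch; the fitted values should be back-transformed automatically when a lambda is supplied):

forecast::accuracy(auto.arima(mcopper))
forecast::accuracy(auto.arima(mcopper, lambda=mcopper_lambda))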

autoplot(forecast(auto.arima(mcopper, lambda=mcopper_lambda)))

forecast(auto.arima(mcopper, lambda=mcopper_lambda)) %>% checkresiduals()

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(0,1,1)
## Q* = 22.913, df = 23, p-value = 0.4659
## 
## Model df: 1.   Total lags used: 24

#These look like white noise

arima(mcopper, order=c(0,1,0)) %>% forecast() %>% autoplot()

arima(mcopper, order=c(0,1,0)) %>% forecast() %>% checkresiduals()

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(0,1,0)
## Q* = 74.187, df = 24, p-value = 4.99e-07
## 
## Model df: 0.   Total lags used: 24

#This does not look like white noise due to autocorrelations

ets(mcopper)
## ETS(M,Ad,N) 
## 
## Call:
##  ets(y = mcopper) 
## 
##   Smoothing parameters:
##     alpha = 0.9999 
##     beta  = 0.2141 
##     phi   = 0.8 
## 
##   Initial states:
##     l = 265.0541 
##     b = -3.9142 
## 
##   sigma:  0.0632
## 
##      AIC     AICc      BIC 
## 8052.038 8052.189 8078.049

#These results are less compelling than the ARIMA models (and ETS and ARIMA AICs aren’t comparable anyway; a fair comparison needs a test set or cross-validation, sketched below).
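
#A fairer ETS-versus-ARIMA comparison would use time series cross-validation (a sketch; refitting auto.arima on every window is slow on a series this long):

farima <- function(y, h) { forecast(auto.arima(y), h = h) }
fets2  <- function(y, h) { forecast(ets(y), h = h) }
mean(tsCV(mcopper, farima, h = 1)^2, na.rm = TRUE)  # CV MSE, ARIMA
mean(tsCV(mcopper, fets2,  h = 1)^2, na.rm = TRUE)  # CV MSE, ETS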

#13

autoplot(hsales)

#Exhibits seasonality.

hsales %>% ur.kpss()
## 
## ####################################### 
## # KPSS Unit Root / Cointegration Test # 
## ####################################### 
## 
## The value of the test statistic is: 0.115

#The KPSS statistic is already below all the critical values, but I’ll take a seasonal difference of the logs to remove the seasonality; auto.arima could probably handle the raw data too.

hsales_norm = diff(log(hsales),12)
autoplot(hsales_norm)

#Ok, looks stationary now

hsales_aa = auto.arima(hsales_norm)
hsales_aa
## Series: hsales_norm 
## ARIMA(2,0,2)(2,0,0)[12] with non-zero mean 
## 
## Coefficients:
##          ar1     ar2     ma1      ma2     sar1     sar2     mean
##       0.0498  0.8203  0.8433  -0.0281  -0.6244  -0.3207  -0.0056
## s.e.  0.3206  0.2980  0.3284   0.0737   0.0617   0.0615   0.0416
## 
## sigma^2 estimated as 0.009324:  log likelihood=241.64
## AIC=-467.28   AICc=-466.71   BIC=-438.7
checkresiduals(hsales_aa)

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(2,0,2)(2,0,0)[12] with non-zero mean
## Q* = 20.244, df = 17, p-value = 0.262
## 
## Model df: 7.   Total lags used: 24

#Looks like white noise!

hsales_aa %>% forecast(., h=24) %>% autoplot()

#Looks pretty good

hsales_norm %>% ets() %>% forecast(,h=24)%>% autoplot

#Hmm, I don’t hate this, but I prefer the ARIMA version: its prediction intervals keep a stable width (which may be a bit misleading), and its point forecasts vary rather than going flat like these ETS ones.

#14.

stlf(hsales_norm, method="arima") %>% autoplot()

#Wow, very cool.

forecast::accuracy(hsales_aa %>% forecast(., h=24))
##                       ME       RMSE        MAE MPE MAPE     MASE         ACF1
## Training set 0.001439556 0.09526952 0.07731403 NaN  Inf 0.318452 -0.008752242
forecast::accuracy(hsales_norm %>% ets() %>% forecast(,h=24))
##                       ME      RMSE        MAE MPE MAPE      MASE       ACF1
## Training set 0.001555115 0.1166692 0.09410833 NaN  Inf 0.3876268 0.01314119
forecast::accuracy(stlf(hsales_norm, method="arima"))
##                       ME      RMSE        MAE MPE MAPE      MASE        ACF1
## Training set 0.001030348 0.1123553 0.09019102 NaN  Inf 0.3714916 -0.02132857

#The stlf-with-ARIMA fit is slightly better than the ETS. Both are worse than the auto.arima model at the top.

#15

train_aa = auto.arima(train)
train_aa
## Series: train 
## ARIMA(0,1,1)(0,1,1)[12] 
## 
## Coefficients:
##           ma1     sma1
##       -0.4127  -0.5476
## s.e.   0.0533   0.0454
## 
## sigma^2 estimated as 125.2:  log likelihood=-1274.1
## AIC=2554.2   AICc=2554.27   BIC=2565.61
autoplot(forecast(train_aa))

#This looks better than the earlier models of this series.

#16

autoplot(sheep)

#The series trends downward with no seasonality (annual data). The model in the exercise, yt = yt-1 + phi1(yt-1 - yt-2) + phi2(yt-2 - yt-3) + phi3(yt-3 - yt-4) + et, is an ARIMA(3,1,0): an AR(3) on the first differences, with no moving average term.

tsdisplay(diff(sheep))

#The differenced series shows no seasonality (the data are annual). The PACF spikes at the first few lags support the ARIMA(3,1,0) form: one first difference plus three autoregressive terms, and no MA component.

1797+.42*(1797-1791)-.2*(1791-1627)-.3*(1627-1665)
## [1] 1778.12
1778+.42*(1778-1797)-.2*(1797-1791)-.3*(1791-1627)
## [1] 1719.62
1719+.42*(1719-1778)-.2*(1778-1797)-.3*(1797-1791)
## [1] 1696.22
sheep %>% auto.arima() %>% forecast(h=3)
##      Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 1940       1777.996 1687.454 1868.537 1639.524 1916.467
## 1941       1718.869 1561.544 1876.193 1478.261 1959.476
## 1942       1695.985 1494.151 1897.819 1387.306 2004.664

#Yup, the hand calculations match auto.arima’s point forecasts almost exactly; the small differences come from rounding the intermediate forecasts.
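
#The same recursion as a small function (a sketch; sheep_fc is a hypothetical helper, coefficients from the exercise), which avoids rounding the intermediate forecasts:

sheep_fc <- function(y, phi, h) {
  y <- as.numeric(y)
  for (i in seq_len(h)) {
    n <- length(y)
    # ARIMA(3,1,0): next value = last value + AR(3) on the most recent differences
    y <- c(y, y[n] + sum(phi * diff(y)[(n - 1):(n - 3)]))
  }
  tail(y, h)
}
sheep_fc(sheep, phi = c(.42, -.20, -.30), h = 3)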

#17

autoplot(bicoal)

tsdisplay(bicoal)

#The ACF and PACF show cyclic behaviour rather than seasonality (annual data again). The formula contains no moving average term: it is four autoregressive terms on the lagged levels plus a constant, i.e. an ARIMA(4,0,0).

#lol wasn’t once enough.

162+.83*(545)-.34*(552)+.55*(534)-.38*(512)
## [1] 525.81
162+.83*(525)-.34*(545)+.55*(552)-.38*(534)
## [1] 513.13
162+.83*(513)-.34*(525)+.55*(545)-.38*(552)
## [1] 499.28
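
#And the matching recursion for this model (a sketch; bicoal_fc is a hypothetical helper): an AR(4) on the levels with a constant.

bicoal_fc <- function(y, c0, phi, h) {
  y <- as.numeric(y)
  for (i in seq_len(h)) {
    n <- length(y)
    # y_t = c + phi1*y_{t-1} + phi2*y_{t-2} + phi3*y_{t-3} + phi4*y_{t-4}
    y <- c(y, c0 + sum(phi * y[n:(n - 3)]))
  }
  tail(y, h)
}
bicoal_fc(bicoal, c0 = 162, phi = c(.83, -.34, .55, -.38), h = 3)
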
#18

gdp_data <- Quandl("FRED/GDP", type="ts")
autoplot(gdp_data)

gdp_arima = auto.arima(gdp_data)
gdp_arima
## Series: gdp_data 
## ARIMA(1,2,2)(0,0,2)[4] 
## 
## Coefficients:
##          ar1      ma1     ma2     sma1     sma2
##       0.4138  -1.5957  0.6453  -0.3032  -0.3554
## s.e.  0.1427   0.1179  0.1132   0.0760   0.0981
## 
## sigma^2 estimated as 26582:  log likelihood=-1927.89
## AIC=3867.78   AICc=3868.07   BIC=3889.93
gdp_arima %>% forecast(,h=5) %>% autoplot()

gdp_arima %>% checkresiduals()

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(1,2,2)(0,0,2)[4]
## Q* = 3.3139, df = 3, p-value = 0.3457
## 
## Model df: 5.   Total lags used: 8

#Wow, look at COVID

gdp_ets = gdp_data %>% ets() %>% checkresiduals()

## 
##  Ljung-Box test
## 
## data:  Residuals from ETS(M,A,N)
## Q* = 28.491, df = 4, p-value = 9.919e-06
## 
## Model df: 4.   Total lags used: 8

#Not white noise (p < 0.001): the ETS residuals are still autocorrelated.

 gdp_data %>% diff() %>%  diff() %>% ets() %>% checkresiduals()

## 
##  Ljung-Box test
## 
## data:  Residuals from ETS(A,N,N)
## Q* = 103.08, df = 6, p-value < 2.2e-16
## 
## Model df: 2.   Total lags used: 8
gdp_data %>% diff() %>%  diff() %>% ets() %>% forecast(, h=4) %>% autoplot

#This is the most code I’ve ever written, and honestly I enjoyed it. For the first time it wasn’t about looking up how to do things in the book and on Stack Overflow; it was “OK, I know how to do this one” and banging it out. I’m up against a hard time constraint and did the absolute most I could.