suppressMessages(library(fpp2))

7.1

a-)

summary(ses(pigs,h=4))
## 
## Forecast method: Simple exponential smoothing
## 
## Model Information:
## Simple exponential smoothing 
## 
## Call:
##  ses(y = pigs, h = 4) 
## 
##   Smoothing parameters:
##     alpha = 0.2971 
## 
##   Initial states:
##     l = 77260.0561 
## 
##   sigma:  10308.58
## 
##      AIC     AICc      BIC 
## 4462.955 4463.086 4472.665 
## 
## Error measures:
##                    ME    RMSE      MAE       MPE     MAPE      MASE       ACF1
## Training set 385.8721 10253.6 7961.383 -0.922652 9.274016 0.7966249 0.01282239
## 
## Forecasts:
##          Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## Sep 1995       98816.41 85605.43 112027.4 78611.97 119020.8
## Oct 1995       98816.41 85034.52 112598.3 77738.83 119894.0
## Nov 1995       98816.41 84486.34 113146.5 76900.46 120732.4
## Dec 1995       98816.41 83958.37 113674.4 76092.99 121539.8

b-)

se <- ses(pigs, h=4)

paste('lower', 98816.41 - 1.96 * sd(se$residuals))
## [1] "lower 78679.9711418255"
paste('upper', 98816.41 + 1.96 * sd(se$residuals))
## [1] "upper 118952.848858174"
se$lower[, '95%'][1]
## [1] 78611.97
se$upper[, '95%'][1]
## [1] 119020.8

The intervals produced by R are slightly wider than the hand-calculated one, because R's reported sigma (10308.58) adjusts for the number of estimated parameters, while sd() of the residuals uses only the usual n-1 denominator.
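The gap can be closed by using the model's own sigma. A minimal sketch, assuming the fitted model stores its error variance in se$model$sigma2 (as ets models do):

s <- sqrt(se$model$sigma2)       # model-based estimate of the forecast error sd
se$mean[1] + c(-1.96, 1.96) * s  # reproduces R's 95% interval for the first forecast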

7.5

a-)

autoplot(books)

Sales of both book types appear to be trending upward, but no seasonality or cycle is visible.
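As a quick check on the no-seasonality reading, the autocorrelations can be inspected; a sketch using ggAcf() from the forecast package (loaded via fpp2):

ggAcf(books[, 1])  # paperback: look for significant spikes at regular lags
ggAcf(books[, 2])  # hardcover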

b-)

Paperback:
ses(books[,1]) %>% autoplot() + autolayer(fitted(ses(books[,1])), series='paperback-fitted') 

Hardcover:
ses(books[,2]) %>% autoplot() + autolayer(fitted(ses(books[,2])), series='hardcover-fitted') 

c-)

Paperback:
accuracy(ses(books[,1]))
##                    ME     RMSE     MAE       MPE     MAPE      MASE       ACF1
## Training set 7.175981 33.63769 27.8431 0.4736071 15.57784 0.7021303 -0.2117522
Hardcover:
accuracy(ses(books[,2]))
##                    ME     RMSE      MAE      MPE     MAPE      MASE       ACF1
## Training set 9.166735 31.93101 26.77319 2.636189 13.39487 0.7987887 -0.1417763

7.6

a-)

Paperback:
holt(books[,1])
##    Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 31       209.4668 166.6035 252.3301 143.9130 275.0205
## 32       210.7177 167.8544 253.5811 145.1640 276.2715
## 33       211.9687 169.1054 254.8320 146.4149 277.5225
## 34       213.2197 170.3564 256.0830 147.6659 278.7735
## 35       214.4707 171.6073 257.3340 148.9169 280.0245
## 36       215.7216 172.8583 258.5850 150.1678 281.2754
## 37       216.9726 174.1093 259.8360 151.4188 282.5264
## 38       218.2236 175.3602 261.0869 152.6697 283.7774
## 39       219.4746 176.6112 262.3379 153.9207 285.0284
## 40       220.7255 177.8621 263.5889 155.1716 286.2794
Hardcover:
holt(books[,2])
##    Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 31       250.1739 212.7390 287.6087 192.9222 307.4256
## 32       253.4765 216.0416 290.9113 196.2248 310.7282
## 33       256.7791 219.3442 294.2140 199.5274 314.0308
## 34       260.0817 222.6468 297.5166 202.8300 317.3334
## 35       263.3843 225.9494 300.8192 206.1326 320.6360
## 36       266.6869 229.2520 304.1218 209.4351 323.9386
## 37       269.9895 232.5546 307.4244 212.7377 327.2412
## 38       273.2921 235.8572 310.7270 216.0403 330.5438
## 39       276.5947 239.1597 314.0296 219.3429 333.8465
## 40       279.8973 242.4623 317.3322 222.6455 337.1491

b-)

Paperback:
accuracy(holt(books[,1]))
##                     ME     RMSE      MAE       MPE     MAPE      MASE
## Training set -3.717178 31.13692 26.18083 -5.508526 15.58354 0.6602122
##                    ACF1
## Training set -0.1750792
Hardcover:
accuracy(holt(books[,2]))
##                      ME     RMSE      MAE       MPE    MAPE      MASE
## Training set -0.1357882 27.19358 23.15557 -2.114792 12.1626 0.6908555
##                     ACF1
## Training set -0.03245186

c-)

autoplot(books[,1]) +
  autolayer(holt(books[,1]), series = 'holt', PI = F) +
  autolayer(ses(books[,1]),series = 'ses',PI=F)

autoplot(books[,2]) +
  autolayer(holt(books[,2]), series = 'holt', PI = F) +
  autolayer(ses(books[,2]),series = 'ses',PI=F)

7.7

default.holt <- holt(eggs , h = 100)
damped.holt <- holt(eggs , h = 100, damped = T)
exp.holt <- holt(eggs , h = 100, exponential = T)
lambda.holt <- holt(eggs , h = 100, lambda = 'auto', biasadj = T)


autoplot(eggs) +
  autolayer(default.holt, series='default', PI=F) +
  autolayer(damped.holt, series='damped', PI=F) +
  autolayer(exp.holt, series='exp', PI=F) +
  autolayer(lambda.holt, series='boxcox', PI=F) 

The damped and exponential forecasts follow very similar trends, as do the Box-Cox and exponential forecasts. The default Holt forecast stands apart as a straight, downward-sloping line.

accuracy(default.holt)
##                      ME     RMSE      MAE       MPE     MAPE      MASE
## Training set 0.04499087 26.58219 19.18491 -1.142201 9.653791 0.9463626
##                    ACF1
## Training set 0.01348202
accuracy(damped.holt)
##                     ME     RMSE     MAE       MPE     MAPE      MASE
## Training set -2.891496 26.54019 19.2795 -2.907633 10.01894 0.9510287
##                      ACF1
## Training set -0.003195358
accuracy(exp.holt)
##                     ME     RMSE      MAE       MPE     MAPE      MASE      ACF1
## Training set 0.4918791 26.49795 19.29399 -1.263235 9.766049 0.9517436 0.0103908
accuracy(lambda.holt)
##                      ME     RMSE      MAE      MPE     MAPE      MASE      ACF1
## Training set -0.2015298 26.38689 18.99362 -1.63043 9.713172 0.9369265 0.0383996

The RMSE values are all very close, differing only in the decimals. If one model must be chosen, the Box-Cox (lambda) model has the lowest RMSE.
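To make that comparison explicit, the training RMSEs can be collected side by side; a sketch assuming the four model objects fitted above:

models <- list(default = default.holt, damped = damped.holt,
               exp = exp.holt, boxcox = lambda.holt)
sapply(models, function(f) accuracy(f)["Training set", "RMSE"])  # one RMSE per variant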

7.8

a-)

retaildata <- readxl::read_excel("retail.xlsx", skip=1)


myts <- ts(retaildata[,"A3349709X"],
  frequency=12, start=c(1982,4))
autoplot(myts)

Multiplicative seasonality is necessary because the seasonal fluctuations grow with the level of the series; an additive model would treat them as fixed in size.
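One way to see this in numbers is to fit both seasonal variants and compare training errors; a sketch (the additive fit should fare worse when the seasonal swings scale with the level):

accuracy(hw(myts, seasonal = "additive"))        # assumes constant seasonal swings
accuracy(hw(myts, seasonal = "multiplicative"))  # lets swings scale with the level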

b-)

summary(hw(myts, seasonal = "multi",damped = F))
## 
## Forecast method: Holt-Winters' multiplicative method
## 
## Model Information:
## Holt-Winters' multiplicative method 
## 
## Call:
##  hw(y = myts, seasonal = "multi", damped = F) 
## 
##   Smoothing parameters:
##     alpha = 0.8439 
##     beta  = 0.001 
##     gamma = 0.0569 
## 
##   Initial states:
##     l = 60.0587 
##     b = 0.8487 
##     s = 0.9671 0.9175 1.0584 1.1371 1.0343 1.0717
##            1.0187 1.0045 0.9912 0.9301 0.9602 0.9093
## 
##   sigma:  0.0668
## 
##      AIC     AICc      BIC 
## 4326.889 4328.574 4393.916 
## 
## Error measures:
##                     ME     RMSE      MAE         MPE    MAPE      MASE
## Training set 0.6269268 16.20066 11.21058 -0.09885034 4.83248 0.3420854
##                     ACF1
## Training set -0.01791788
## 
## Forecasts:
##          Point Forecast    Lo 80     Hi 80    Lo 95     Hi 95
## Jan 2014       629.5695 575.6504  683.4887 547.1073  712.0318
## Feb 2014       584.5749 519.0182  650.1315 484.3146  684.8351
## Mar 2014       616.2703 533.9845  698.5561 490.4250  742.1155
## Apr 2014       596.6718 506.0038  687.3398 458.0071  735.3365
## May 2014       599.2660 498.3401  700.1918 444.9132  753.6188
## Jun 2014       570.8063 466.1013  675.5113 410.6738  730.9388
## Jul 2014       602.9768 483.9860  721.9676 420.9961  784.9576
## Aug 2014       617.9231 487.9412  747.9051 419.1328  816.7134
## Sep 2014       621.6684 483.2650  760.0718 409.9986  833.3381
## Oct 2014       656.2616 502.5046  810.0187 421.1105  891.4128
## Nov 2014       663.7379 500.8422  826.6336 414.6104  912.8654
## Dec 2014       742.4375 552.3069  932.5682 451.6578 1033.2172
## Jan 2015       643.3577 471.0944  815.6210 379.9037  906.8117
## Feb 2015       597.3548 431.5215  763.1880 343.7346  850.9749
## Mar 2015       629.7191 448.8949  810.5433 353.1723  906.2659
## Apr 2015       609.6697 428.9629  790.3766 333.3025  886.0370
## May 2015       612.2972 425.3074  799.2870 326.3210  898.2734
## Jun 2015       583.1967 399.9905  766.4029 303.0070  863.3864
## Jul 2015       616.0424 417.2621  814.8227 312.0341  920.0506
## Aug 2015       631.2889 422.3305  840.2473 311.7145  950.8633
## Sep 2015       635.0914 419.7029  850.4799 305.6832  964.4997
## Oct 2015       670.4067 437.6987  903.1146 314.5106 1026.3028
## Nov 2015       678.0190 437.3753  918.6626 309.9863 1046.0516
## Dec 2015       758.3839 483.4110 1033.3567 337.8492 1178.9185
summary(hw(myts, seasonal = "multi", damped = T))
## 
## Forecast method: Damped Holt-Winters' multiplicative method
## 
## Model Information:
## Damped Holt-Winters' multiplicative method 
## 
## Call:
##  hw(y = myts, seasonal = "multi", damped = T) 
## 
##   Smoothing parameters:
##     alpha = 0.7177 
##     beta  = 0.0073 
##     gamma = 1e-04 
##     phi   = 0.98 
## 
##   Initial states:
##     l = 58.5098 
##     b = 0.2133 
##     s = 0.997 0.9329 0.9988 1.1725 1.0477 1.0367
##            0.9938 0.9952 0.9644 0.9179 0.9725 0.9706
## 
##   sigma:  0.0633
## 
##      AIC     AICc      BIC 
## 4283.222 4285.111 4354.192 
## 
## Error measures:
##                    ME     RMSE      MAE       MPE     MAPE      MASE       ACF1
## Training set 1.482515 15.65027 10.92983 0.3859787 4.551222 0.3335188 0.09915935
## 
## Forecasts:
##          Point Forecast    Lo 80    Hi 80    Lo 95     Hi 95
## Jan 2014       612.1426 562.4737 661.8116 536.1805  688.1047
## Feb 2014       573.1030 515.6697 630.5363 485.2663  660.9397
## Mar 2014       613.8077 542.3224 685.2929 504.4804  723.1349
## Apr 2014       598.8676 520.4536 677.2816 478.9438  718.7914
## May 2014       601.3891 514.6846 688.0936 468.7860  733.9922
## Jun 2014       568.6949 479.7024 657.6875 432.5926  704.7973
## Jul 2014       598.7143 498.0854 699.3431 444.8158  752.6128
## Aug 2014       619.0468 508.1871 729.9065 449.5015  788.5920
## Sep 2014       619.3910 501.9511 736.8309 439.7822  798.9998
## Oct 2014       647.3120 518.0291 776.5950 449.5908  845.0333
## Nov 2014       655.4658 518.1512 792.7804 445.4612  865.4704
## Dec 2014       734.9023 573.9895 895.8151 488.8074  980.9972
## Jan 2015       627.0841 484.0075 770.1607 408.2674  845.9008
## Feb 2015       586.7808 447.6391 725.9225 373.9819  799.5797
## Mar 2015       628.1321 473.6852 782.5791 391.9259  864.3384
## Apr 2015       612.5342 456.6751 768.3934 374.1683  850.9002
## May 2015       614.8103 453.2102 776.4104 367.6643  861.9564
## Jun 2015       581.1070 423.5771 738.6368 340.1859  822.0280
## Jul 2015       611.4942 440.7749 782.2136 350.4016  872.5869
## Aug 2015       631.9708 450.4995 813.4421 354.4345  909.5072
## Sep 2015       632.0391 445.5886 818.4896 346.8876  917.1905
## Oct 2015       660.2413 460.3639 860.1188 354.5552  965.9275
## Nov 2015       668.2724 460.8634 875.6813 351.0677  985.4770
## Dec 2015       748.9482 510.8565 987.0399 384.8183 1113.0781
autoplot(myts) +
  autolayer(hw(myts, seasonal = "multi",damped = F),series = 'damped-false',PI=F) +
  autolayer(hw(myts, seasonal = "multi",damped = T),series = 'damped-true',PI=F)

c-)

accuracy(hw(myts, seasonal = "multi",damped = F))
##                     ME     RMSE      MAE         MPE    MAPE      MASE
## Training set 0.6269268 16.20066 11.21058 -0.09885034 4.83248 0.3420854
##                     ACF1
## Training set -0.01791788
accuracy(hw(myts, seasonal = "multi",damped = T))
##                    ME     RMSE      MAE       MPE     MAPE      MASE       ACF1
## Training set 1.482515 15.65027 10.92983 0.3859787 4.551222 0.3335188 0.09915935

The damped-trend model has the lower (better) RMSE.

d-)
autoplot(residuals(hw(myts, seasonal = "multi",damped = T)))

The residuals appear to be white noise.
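A more formal check than eyeballing the residual plot is checkresiduals(), which adds an ACF plot and a Ljung-Box test:

checkresiduals(hw(myts, seasonal = "multi", damped = T))  # Ljung-Box p-value plus residual ACF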

7.9

myts.train <- window(myts, end = c(2008, 12))
myts.test <- window(myts, start = c(2009, 1))

autoplot(myts.train) +
  autolayer(hw(myts.train, seasonal = "multi",damped = F),series = 'damped-false',PI=F) +
  autolayer(hw(myts.train, seasonal = "multi",damped = T),series = 'damped-true',PI=F)

accuracy(hw(myts.train, seasonal = "multi",damped = F))
##                     ME     RMSE      MAE      MPE     MAPE      MASE       ACF1
## Training set 0.6060111 13.11679 9.293446 0.108113 4.679442 0.3335598 0.03697905
accuracy(hw(myts.test, seasonal = "multi",damped = T))
##                    ME     RMSE      MAE      MPE     MAPE      MASE       ACF1
## Training set 1.212733 19.00978 14.21132 0.125952 3.030403 0.3044195 0.00796861

After refitting the Holt-Winters models on this split, the RMSE results improve, and the undamped model (damped = F) now gives the better RMSE.
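Note that the second accuracy() call above fits a fresh model on myts.test itself; a cleaner out-of-sample comparison fits on the training window and scores the forecasts against the held-out test window. A sketch:

fc.damped <- hw(myts.train, seasonal = "multi", damped = T, h = length(myts.test))
fc.plain  <- hw(myts.train, seasonal = "multi", damped = F, h = length(myts.test))
accuracy(fc.damped, myts.test)["Test set", "RMSE"]  # damped, scored on held-out data
accuracy(fc.plain, myts.test)["Test set", "RMSE"]   # undamped, scored on held-out data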