library(fpp2)
## Registered S3 method overwritten by 'quantmod':
##   method            from
##   as.zoo.data.frame zoo
## ── Attaching packages ──────────────────────────────────────────── fpp2 2.4 ──
## ✔ ggplot2   3.3.2     ✔ fma       2.4  
## ✔ forecast  8.13      ✔ expsmooth 2.3
## 
library(seasonal)
library(tseries)
library(forecast)
str(pigs)
##  Time-Series [1:188] from 1980 to 1996: 76378 71947 33873 96428 105084 ...
head(pigs)
##         Jan    Feb    Mar    Apr    May    Jun
## 1980  76378  71947  33873  96428 105084  95741
ses_pigs <- ses(pigs, h = 4)
ses_pigs$model
## Simple exponential smoothing 
## 
## Call:
##  ses(y = pigs, h = 4) 
## 
##   Smoothing parameters:
##     alpha = 0.2971 
## 
##   Initial states:
##     l = 77260.0561 
## 
##   sigma:  10308.58
## 
##      AIC     AICc      BIC 
## 4462.955 4463.086 4472.665
ses_pigs$upper[1, "95%"]
##      95% 
## 119020.8
ses_pigs$lower[1, "95%"]
##      95% 
## 78611.97
s <- sd(ses_pigs$residuals)
ses_pigs$mean[1] + 1.96*s
## [1] 118952.8
ses_pigs$mean[1] - 1.96*s
## [1] 78679.97
autoplot(ses_pigs) + autolayer(ses_pigs$fitted)

# Hand-rolled SES: iterate the level update l = alpha*y + (1 - alpha)*l from l0.
SES <- function(y, alpha, l0){
  y_hat <- l0
  for(index in 1:length(y)) y_hat <- alpha*y[index] + (1 - alpha)*y_hat
  cat("Forecast of next observation by SES function: ", as.character(y_hat), sep = "\n")
}
alpha <- ses_pigs$model$par[1]
l0 <- ses_pigs$model$par[2]
SES(pigs, alpha = alpha, l0 = l0)
## Forecast of next observation by SES function: 
## 98816.4061115907
writeLines(paste("Forecast of next observation by ses function: ",   as.character(ses_pigs$mean[1])))
## Forecast of next observation by ses function:  98816.4061115907
ses_ausbeer <- ses(ausbeer, h = 1)
alpha <- ses_ausbeer$model$par[1]
l0 <- ses_ausbeer$model$par[2]
SES(ausbeer, alpha = alpha, l0 = l0)
## Forecast of next observation by SES function: 
## 421.313649201742
writeLines(paste("Forecast of next observation by ses function: ",  as.character(ses_ausbeer$mean[1])))
## Forecast of next observation by ses function:  421.313649201742
# Sum of squared one-step errors for a given (alpha, l0); suitable for optim().
sse_fn <- function(pars = c(alpha, l0), y){
  sse <- 0
  alpha <- pars[1]
  l0 <- pars[2]
  y_hat <- l0
  for(index in 1:length(y)){
    sse <- sse + (y[index] - y_hat)^2
    y_hat <- alpha*y[index] + (1 - alpha)*y_hat
  }
  return(sse)
}
# Estimate (alpha, l0) by minimising the SSE with optim(), then report the
# one-step-ahead forecast implied by the optimised parameters.
SES <- function(init_pars, data){
  fc_next <- 0
  SSE <- function(pars, data){
    error <- 0
    SSE <- 0
    alpha <- pars[1]
    l0 <- pars[2]
    y_hat <- l0
    for(index in 1:length(data)){
      error <- data[index] - y_hat
      SSE <- SSE + error^2
      y_hat <- alpha*data[index] + (1 - alpha)*y_hat
    }
    fc_next <<- y_hat  # keep the final level: it is the next-step forecast
    return(SSE)
  }
  optim_pars <- optim(par = init_pars, data = data, fn = SSE)
  return(list(
    Next_observation_forecast = fc_next,
    alpha = optim_pars$par[1],
    l0 = optim_pars$par[2]
  ))
}
SES(c(0.5, pigs[1]), pigs)
## $Next_observation_forecast
## [1] 98814.63
## 
## $alpha
## [1] 0.2990081
## 
## $l0
## [1] 76379.27
print("Next observation forecast by ses function")
## [1] "Next observation forecast by ses function"
ses_pigs$mean[1]
## [1] 98816.41
print("alpha calculated by ses function")
## [1] "alpha calculated by ses function"
ses_pigs$model$par[1]
##     alpha 
## 0.2971488
print("l0 calculated by ses function")
## [1] "l0 calculated by ses function"
ses_pigs$model$par[2]
##        l 
## 77260.06
SES(c(0.5, ausbeer[1]), ausbeer)
## $Next_observation_forecast
## [1] 421.3166
## 
## $alpha
## [1] 0.1488036
## 
## $l0
## [1] 259.6585
print("Next observation forecast by ses function")
## [1] "Next observation forecast by ses function"
ses_ausbeer$mean[1]
## [1] 421.3136
print("alpha calculated by ses function")
## [1] "alpha calculated by ses function"
ses_ausbeer$model$par[1]
##     alpha 
## 0.1489004
print("l0 calculated by ses function")
## [1] "l0 calculated by ses function"
ses_ausbeer$model$par[2]
##        l 
## 259.6592

Both implementations work out essentially the same forecasts and parameter estimates.
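
A quick numerical check of that claim, reusing the objects above (hand is a name introduced here):

# Gaps between the hand-rolled optim() fit and ses() for the pigs series.
hand <- SES(c(0.5, pigs[1]), pigs)
abs(hand$Next_observation_forecast - ses_pigs$mean[1])  # forecast gap
abs(hand$alpha - ses_pigs$model$par[1])                 # alpha gap
abs(hand$l0 - ses_pigs$model$par[2])                    # l0 gap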

5.a

str(books)
##  Time-Series [1:30, 1:2] from 1 to 30: 199 172 111 209 161 119 195 195 131 183 ...
##  - attr(*, "dimnames")=List of 2
##   ..$ : NULL
##   ..$ : chr [1:2] "Paperback" "Hardcover"
head(books)
## Time Series:
## Start = 1 
## End = 6 
## Frequency = 1 
##   Paperback Hardcover
## 1       199       139
## 2       172       128
## 3       111       172
## 4       209       139
## 5       161       191
## 6       119       168
autoplot(books)

5.b

ses_paperback <- ses(books[, "Paperback"], h = 4)
ses_hardcover <- ses(books[, "Hardcover"], h = 4)
autoplot(books[, "Paperback"], series = "Paperback")+autolayer(ses_paperback, series = "Paperback")+autolayer(books[, "Hardcover"], series = "Hardcover")+autolayer(ses_hardcover, series = "Hardcover", PI = FALSE)+ylab("Sales amount")+ggtitle("Sales of paperback and hardcover books")

5.c

sqrt(mean(ses_paperback$residuals^2))
## [1] 33.63769
sqrt(mean(ses_hardcover$residuals^2))
## [1] 31.93101

The two training RMSEs are similar, with the hardcover series fitted slightly better (31.9 against 33.6).

6.a

holt_paperback <- holt(books[, "Paperback"], h = 4)
holt_hardcover <- holt(books[, "Hardcover"], h = 4)
autoplot(books[, "Paperback"])+autolayer(holt_paperback)

autoplot(books[, "Hardcover"])+autolayer(holt_hardcover)

6.b

s_paperback <- sqrt(mean(holt_paperback$residuals^2))
s_hardcover <- sqrt(mean(holt_hardcover$residuals^2))
s_paperback
## [1] 31.13692
s_hardcover
## [1] 27.19358

Holt's linear method gives a lower RMSE for both series, so it is worth the extra smoothing parameter compared with SES.

6.c I think the forecasts for hardcover sales are better than those for paperback: the RMSE is lower for the hardcover series, and the paperback forecasts fail to reflect the pattern in the data even with Holt's method.

6.d

writeLines("95% PI of paperback sales calculated by holt function")
## 95% PI of paperback sales calculated by holt function
holt_paperback$upper[1, "95%"]
##      95% 
## 275.0205
holt_paperback$lower[1, "95%"]
##     95% 
## 143.913
writeLines("95% PI of paperback sales calculated by formula")
## 95% PI of paperback sales calculated by formula
holt_paperback$mean[1] + 1.96*s_paperback
## [1] 270.4951
holt_paperback$mean[1] - 1.96*s_paperback
## [1] 148.4384
writeLines("95% PI of hardcover sales calculated by holt function")
## 95% PI of hardcover sales calculated by holt function
holt_hardcover$upper[1, "95%"]
##      95% 
## 307.4256
holt_hardcover$lower[1, "95%"]
##      95% 
## 192.9222
writeLines("95% PI of hardcover sales calculated by formula")
## 95% PI of hardcover sales calculated by formula
holt_hardcover$mean[1] + 1.96*s_hardcover
## [1] 303.4733
holt_hardcover$mean[1] - 1.96*s_hardcover
## [1] 196.8745

7

default <- holt(eggs, h = 100)
damped <- holt(eggs, h = 100, damped = TRUE)
exponential <- holt(eggs, h = 100, exponential = TRUE)
lambda <- holt(eggs, h = 100, lambda = 'auto', biasadj = TRUE)
da_ex <- holt(eggs, h = 100, exponential = TRUE, damped = TRUE)
da_la <- holt(eggs, h = 100, damped = TRUE, lambda = 'auto', biasadj = TRUE)
autoplot(eggs) +
  autolayer(default, series = 'Default', PI = FALSE) +
  autolayer(damped, series = 'Damped', PI = FALSE) +
  autolayer(exponential, series = 'Exponential', PI = FALSE) +
  autolayer(lambda, series = 'Box-Cox Transformed', PI = FALSE) +
  autolayer(da_ex, series = 'Damped & Exponential', PI = FALSE) +
  autolayer(da_la, series = 'Damped & Box-Cox', PI = FALSE) +
  ggtitle('Forecast of US Eggs Prices') +
  xlab('Year') +
  ylab('Price of Dozen Eggs')

The exponential trend forecast stays very close to the Box-Cox transformed one, and both decline far more gently than the damped trend variants.
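
To attach numbers to that comparison, one can inspect the point forecasts at the end of the 100-year horizon (a small sketch using the objects above):

# Final (h = 100) point forecast under each specification.
c(default = default$mean[100],
  damped = damped$mean[100],
  exponential = exponential$mean[100],
  box_cox = lambda$mean[100])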

8.a

retaildata <- readxl::read_excel("retail.xlsx", skip=1)
myts <- ts(retaildata[,"A3349873A"],frequency=12, start=c(1982,4))
autoplot(myts)+ggtitle('Turnover; New South Wales; Other retailing')+ylab('Turnover')

From the plot, the seasonal variation grows with the level of the series over time, so multiplicative seasonality is necessary.
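
A season plot supports this reading (a quick sketch; ggseasonplot comes with the forecast package loaded above):

# Later years sit higher and swing more widely across the months,
# the signature of multiplicative seasonality.
ggseasonplot(myts)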

8.b

fit1 <- hw(myts, seasonal='multiplicative', damped=F)
summary(fit1)
## 
## Forecast method: Holt-Winters' multiplicative method
## 
## Model Information:
## Holt-Winters' multiplicative method 
## 
## Call:
##  hw(y = myts, seasonal = "multiplicative", damped = F) 
## 
##   Smoothing parameters:
##     alpha = 0.504 
##     beta  = 1e-04 
##     gamma = 0.4578 
## 
##   Initial states:
##     l = 62.8715 
##     b = 0.8152 
##     s = 0.9514 0.886 0.9114 1.5529 1.0184 0.9813
##            0.9589 0.9898 0.9593 0.8883 0.9094 0.9929
## 
##   sigma:  0.0513
## 
##      AIC     AICc      BIC 
## 4040.084 4041.770 4107.112 
## 
## Error measures:
##                     ME     RMSE      MAE        MPE     MAPE      MASE
## Training set 0.1170648 13.29378 8.991856 -0.1217735 3.918351 0.4748948
##                    ACF1
## Training set 0.08635577
## 
## Forecasts:
##          Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## Jan 2014       390.3784 364.7154 416.0413 351.1303 429.6264
## Feb 2014       391.1995 362.4039 419.9951 347.1605 435.2386
## Mar 2014       427.9732 393.4376 462.5088 375.1555 480.7909
## Apr 2014       394.1500 359.7834 428.5167 341.5908 446.7093
## May 2014       403.4598 365.8492 441.0704 345.9394 460.9802
## Jun 2014       392.3988 353.6036 431.1940 333.0667 451.7309
## Jul 2014       410.9940 368.1710 453.8169 345.5019 476.4860
## Aug 2014       405.6186 361.3056 449.9315 337.8478 473.3893
## Sep 2014       416.5669 369.0509 464.0828 343.8975 489.2362
## Oct 2014       437.9753 385.9982 489.9524 358.4832 517.4674
## Nov 2014       585.8096 513.6953 657.9240 475.5203 696.0990
## Dec 2014       577.7851 504.1964 651.3737 465.2409 690.3292
## Jan 2015       399.6599 342.8992 456.4206 312.8519 486.4679
## Feb 2015       400.4831 342.1250 458.8412 311.2321 489.7341
## Mar 2015       438.1104 372.6939 503.5270 338.0644 538.1564
## Apr 2015       403.4687 341.8115 465.1258 309.1722 497.7652
## May 2015       412.9807 348.4595 477.5019 314.3041 511.6574
## Jun 2015       401.6414 337.5529 465.7300 303.6264 499.6565
## Jul 2015       420.6566 352.1637 489.1496 315.9057 525.4076
## Aug 2015       415.1371 346.2205 484.0538 309.7383 520.5360
## Sep 2015       426.3243 354.2214 498.4272 316.0524 536.5961
## Oct 2015       448.2152 371.0413 525.3891 330.1879 566.2425
## Nov 2015       599.4807 494.4676 704.4937 438.8771 760.0842
## Dec 2015       591.2440 485.9383 696.5497 430.1928 752.2952
fit2 <- hw(myts, seasonal='multiplicative', damped=T)
summary(fit2)
## 
## Forecast method: Damped Holt-Winters' multiplicative method
## 
## Model Information:
## Damped Holt-Winters' multiplicative method 
## 
## Call:
##  hw(y = myts, seasonal = "multiplicative", damped = T) 
## 
##   Smoothing parameters:
##     alpha = 0.5524 
##     beta  = 2e-04 
##     gamma = 0.4476 
##     phi   = 0.9328 
## 
##   Initial states:
##     l = 62.9106 
##     b = 0.6659 
##     s = 0.8986 0.8635 0.8733 1.5546 1.1214 1.0392
##            1.0033 0.9655 0.9238 0.8886 0.9303 0.9378
## 
##   sigma:  0.0527
## 
##      AIC     AICc      BIC 
## 4055.981 4057.871 4126.952 
## 
## Error measures:
##                    ME     RMSE      MAE       MPE     MAPE      MASE       ACF1
## Training set 1.414869 13.30494 9.042151 0.6105987 3.959617 0.4775511 0.04077895
## 
## Forecasts:
##          Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## Jan 2014       391.3161 364.8883 417.7439 350.8983 431.7339
## Feb 2014       392.2638 361.9876 422.5401 345.9603 438.5673
## Mar 2014       427.6983 391.0174 464.3792 371.5997 483.7969
## Apr 2014       391.6405 354.9948 428.2863 335.5957 447.6853
## May 2014       399.2265 358.9916 439.4614 337.6925 460.7605
## Jun 2014       387.6109 345.9350 429.2868 323.8731 451.3487
## Jul 2014       405.5421 359.3641 451.7201 334.9189 476.1653
## Aug 2014       399.6910 351.7735 447.6085 326.4076 472.9745
## Sep 2014       410.5242 358.9526 462.0958 331.6522 489.3962
## Oct 2014       430.9373 374.4342 487.4405 344.5233 517.3514
## Nov 2014       574.0409 495.7448 652.3370 454.2974 693.7845
## Dec 2014       564.4915 484.6264 644.3565 442.3484 686.6345
## Jan 2015       391.6898 330.2053 453.1743 297.6574 485.7222
## Feb 2015       392.6313 329.2488 456.0139 295.6961 489.5666
## Mar 2015       428.0919 357.1258 499.0579 319.5587 536.6251
## Apr 2015       391.9948 325.3521 458.6376 290.0736 493.9161
## May 2015       399.5818 329.9960 469.1677 293.1595 506.0042
## Jun 2015       387.9507 318.8208 457.0805 282.2257 493.6756
## Jul 2015       405.8924 331.9582 479.8266 292.8198 518.9650
## Aug 2015       400.0316 325.6130 474.4502 286.2182 513.8450
## Sep 2015       410.8695 332.8718 488.8672 291.5822 530.1567
## Oct 2015       431.2954 347.8100 514.7807 303.6156 558.9752
## Nov 2015       574.5123 461.1985 687.8262 401.2138 747.8109
## Dec 2015       564.9500 451.4869 678.4131 391.4232 738.4768
autoplot(myts) +
  autolayer(fit1, PI = FALSE, series = 'Not damped') +
  autolayer(fit2, PI = FALSE, series = 'Damped Trend') +
  guides(colour = guide_legend(title = "Forecast")) +
  ggtitle("Turnover Forecast - Holt-Winter's Multiplicative Method") +
  ylab('Turnover')

8.c

accuracy(fit1)
##                     ME     RMSE      MAE        MPE     MAPE      MASE
## Training set 0.1170648 13.29378 8.991856 -0.1217735 3.918351 0.4748948
##                    ACF1
## Training set 0.08635577
accuracy(fit2)
##                    ME     RMSE      MAE       MPE     MAPE      MASE       ACF1
## Training set 1.414869 13.30494 9.042151 0.6105987 3.959617 0.4775511 0.04077895

The undamped fit has a slightly lower training RMSE (13.29 against 13.30).

8.d

autoplot(residuals(fit1)) +
  ggtitle('Residuals') +
  ylab('')
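
The residual plot alone does not settle whether the residuals are white noise; a sketch of the fuller check on the undamped fit chosen above:

# Ljung-Box test plus residual ACF and histogram for fit1.
checkresiduals(fit1)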

8.e

train <- window(myts, end=c(2010, 12))
test <- window(myts, start=c(2011,1))
autoplot(myts) +
  autolayer(train, series = "Training") +
  autolayer(test, series = "Test") +
  ggtitle('Train-Test Split') +
  ylab('Turnover')
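
The split above is never actually scored in this notebook (the stlm fit below uses the full series), so here is a minimal sketch of the intended test-set comparison, with the seasonal naive method as a benchmark (fit_train and fc_naive are names introduced here):

# Refit on the training window only, then score against the held-out years.
fit_train <- hw(train, seasonal = "multiplicative", h = length(test))
fc_naive <- snaive(train, h = length(test))
accuracy(fit_train, test)["Test set", "RMSE"]
accuracy(fc_naive, test)["Test set", "RMSE"]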

9

fc_stl_ets_retail_train <- myts %>%
  stlm(s.window = 13, robust = TRUE, method = "ets",
       lambda = BoxCox.lambda(myts)) %>%
  forecast(h = 36, lambda = BoxCox.lambda(myts))
## Warning in InvBoxCox(fcast$mean, lambda, biasadj, fcast): biasadj information
## not found, defaulting to FALSE.
fc_stl_ets_retail_train
##          Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## Jan 2014       408.2469 381.2173 436.9334 367.5512 452.8190
## Feb 2014       393.4052 364.6876 424.0760 350.2394 441.1409
## Mar 2014       419.1886 386.3656 454.4182 369.9157 474.0924
## Apr 2014       399.7044 366.0387 436.0402 349.2398 456.4167
## May 2014       419.5916 382.3303 459.9821 363.8003 482.7055
## Jun 2014       401.7075 363.9129 442.8819 345.1914 466.1327
## Jul 2014       418.8610 377.7554 463.8158 357.4555 489.2744
## Aug 2014       424.4700 381.0525 472.1418 359.6779 499.2187
## Sep 2014       434.9062 388.7659 485.7531 366.1162 514.7118
## Oct 2014       447.5625 398.4861 501.8308 374.4603 532.8168
## Nov 2014       484.5131 430.0780 544.8641 403.4837 579.3897
## Dec 2014       658.8878 585.2692 740.4585 549.2857 787.1027
## Jan 2015       430.6105 378.4311 488.9591 353.1115 522.5512
## Feb 2015       415.0603 363.1101 473.3796 337.9796 507.0516
## Mar 2015       442.0724 385.6898 505.5155 358.4657 542.2087
## Apr 2015       421.6607 366.2048 484.3056 339.5113 520.6418
## May 2015       442.4945 383.2416 509.5867 354.7740 548.5700
## Jun 2015       423.7594 365.4139 490.0718 337.4658 528.7080
## Jul 2015       441.7294 379.8877 512.1771 350.3190 553.2920
## Aug 2015       447.6043 383.7305 520.5630 353.2559 563.2274
## Sep 2015       458.5342 391.9813 534.7387 360.2902 579.3809
## Oct 2015       471.7872 402.2297 551.6148 369.1687 598.4579
## Nov 2015       510.4675 434.5297 597.7337 398.4744 648.9917
## Dec 2015       692.7952 591.6370 808.7245 543.5013 876.6816
## Jan 2016       454.0355 383.1308 536.1276 349.6660 584.6100
## Feb 2016       437.7481 367.9538 518.8231 335.1005 566.8206
## Mar 2016       466.0386 391.1005 553.2089 355.8651 604.8668
## Apr 2016       444.6618 371.6499 529.8857 337.4156 580.5175
## May 2016       466.4806 389.1742 556.8578 352.9717 610.6121
## Jun 2016       446.8600 371.3468 535.4362 336.0789 588.2472
## Jul 2016       465.6793 386.2606 558.9855 349.2163 614.6814
## Aug 2016       471.8309 390.3845 567.7235 352.4598 625.0517
## Sep 2016       483.2744 398.9768 582.7090 359.7836 642.2354
## Oct 2016       497.1479 409.5952 600.6013 368.9454 662.6113
## Nov 2016       537.6267 442.6223 649.9547 398.5349 717.3144
## Dec 2016       728.2150 602.4937 876.2391 543.9541 964.7353
fc_stl_ets_retail_train_without_tr <- myts %>%
  stlm(s.window = 13, robust = TRUE, method = "ets") %>%
  forecast(h = 36)
autoplot(fc_stl_ets_retail_train_without_tr)

10.a

str(ukcars)
##  Time-Series [1:113] from 1977 to 2005: 330 371 271 344 358 ...
head(ukcars)
##         Qtr1    Qtr2    Qtr3    Qtr4
## 1977 330.371 371.051 270.670 343.880
## 1978 358.491 362.822
autoplot(ukcars)

The plot shows clear quarterly seasonality on top of a changing trend.
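
A subseries plot separates the quarterly pattern from the trend (a quick sketch):

# Mean production per quarter, with each quarter's own mini-series.
ggsubseriesplot(ukcars)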

10.b

seasadj_ukcars <- ukcars %>% stl(s.window = 4, robust = TRUE) %>% seasadj() 
autoplot(seasadj_ukcars)

10.c

stlf_ets_AAdN_ukcars <- ukcars %>% stlf(h = 8, etsmodel = "AAN", damped = TRUE)
autoplot(stlf_ets_AAdN_ukcars)

10.d

stlf_ets_AAN_ukcars <- ukcars %>% stlf(h = 8, etsmodel = "AAN", damped = FALSE)
autoplot(stlf_ets_AAN_ukcars)

10.e

ets_ukcars <- ets(ukcars)
summary(ets_ukcars)
## ETS(A,N,A) 
## 
## Call:
##  ets(y = ukcars) 
## 
##   Smoothing parameters:
##     alpha = 0.6199 
##     gamma = 1e-04 
## 
##   Initial states:
##     l = 314.2568 
##     s = -1.7579 -44.9601 21.1956 25.5223
## 
##   sigma:  25.9302
## 
##      AIC     AICc      BIC 
## 1277.752 1278.819 1296.844 
## 
## Training set error measures:
##                    ME     RMSE      MAE        MPE     MAPE      MASE
## Training set 1.313884 25.23244 20.17907 -0.1570979 6.629003 0.6576259
##                    ACF1
## Training set 0.02573334
autoplot(forecast(ets_ukcars, h = 8))

10.f

writeLines("")
print("Accuracy of STL + ETS(A, Ad, N) model")
## [1] "Accuracy of STL + ETS(A, Ad, N) model"
accuracy(stlf_ets_AAdN_ukcars)
##                    ME     RMSE      MAE        MPE     MAPE     MASE       ACF1
## Training set 1.551267 23.32113 18.48987 0.04121971 6.042764 0.602576 0.02262668
print("Accuracy of STL + ETS(A, A, N) model")
## [1] "Accuracy of STL + ETS(A, A, N) model"
accuracy(stlf_ets_AAN_ukcars)
##                      ME   RMSE     MAE        MPE    MAPE      MASE       ACF1
## Training set -0.3412727 23.295 18.1605 -0.5970778 5.98018 0.5918418 0.02103582
print("Accuracy of ETS(A, N, A) model")
## [1] "Accuracy of ETS(A, N, A) model"
accuracy(ets_ukcars)
##                    ME     RMSE      MAE        MPE     MAPE      MASE
## Training set 1.313884 25.23244 20.17907 -0.1570979 6.629003 0.6576259
##                    ACF1
## Training set 0.02573334

10.g The forecasts from the STL + ETS(A, Ad, N) model look the most reasonable, since they best reflect the flatter, lower-variation behaviour of the series after 2001.

10.h

checkresiduals(stlf_ets_AAdN_ukcars)
## Warning in checkresiduals(stlf_ets_AAdN_ukcars): The fitted degrees of freedom
## is based on the model used for the seasonally adjusted data.

## 
##  Ljung-Box test
## 
## data:  Residuals from STL +  ETS(A,Ad,N)
## Q* = 24.138, df = 3, p-value = 2.337e-05
## 
## Model df: 5.   Total lags used: 8

The very small p-value (2.3e-05) indicates the residuals are still autocorrelated rather than white noise, so this model's prediction intervals should be read with some caution.

11.a

str(visitors)
##  Time-Series [1:240] from 1985 to 2005: 75.7 75.4 83.1 82.9 77.3 ...
head(visitors)
##        May   Jun   Jul   Aug   Sep   Oct
## 1985  75.7  75.4  83.1  82.9  77.3 105.7
autoplot(visitors)

ggseasonplot(visitors)

11.b

visitors_train <- subset(visitors, end = length(visitors) - 24)
visitors_test <- subset(visitors, start = length(visitors) - 23)
hw_mul_visitors_train <- hw(visitors_train, h = 24, seasonal = "multiplicative")

11.c

autoplot(hw_mul_visitors_train)

11.d

fc_ets_visitors_train <- forecast(ets(visitors_train), h = 24)
autoplot(fc_ets_visitors_train)

fc_ets_add_BoxCox_visitors_train <- forecast(
  ets(visitors_train,
      lambda = BoxCox.lambda(visitors_train),
      additive.only = TRUE),
  h = 24)
autoplot(fc_ets_add_BoxCox_visitors_train)

fc_snaive_visitors_train <- snaive(visitors_train, h = 24)
autoplot(fc_snaive_visitors_train)

fc_BoxCox_stl_ets_visitors_train <- visitors_train %>%
  stlm(lambda = BoxCox.lambda(visitors_train),
       s.window = 13,
       robust = TRUE,
       method = "ets") %>%
  forecast(h = 24)
autoplot(fc_BoxCox_stl_ets_visitors_train)

11.e

accuracy(hw_mul_visitors_train, visitors_test)
##                      ME     RMSE      MAE        MPE      MAPE      MASE
## Training set -0.9749466 14.06539 10.35763 -0.5792169  4.223204 0.3970304
## Test set     72.9189889 83.23541 75.89673 15.9157249 17.041927 2.9092868
##                   ACF1 Theil's U
## Training set 0.1356528        NA
## Test set     0.6901318  1.151065
accuracy(fc_ets_visitors_train, visitors_test)
##                      ME     RMSE      MAE        MPE      MAPE     MASE
## Training set  0.7640074 14.53480 10.57657  0.1048224  3.994788 0.405423
## Test set     72.1992664 80.23124 74.55285 15.9202832 16.822384 2.857773
##                     ACF1 Theil's U
## Training set -0.05311217        NA
## Test set      0.58716982  1.127269
accuracy(fc_ets_add_BoxCox_visitors_train, visitors_test)
##                     ME     RMSE      MAE        MPE      MAPE      MASE
## Training set  1.001363 14.97096 10.82396  0.1609336  3.974215 0.4149057
## Test set     69.458843 78.61032 72.41589 15.1662261 16.273089 2.7758586
##                     ACF1 Theil's U
## Training set -0.02535299        NA
## Test set      0.67684148  1.086953
accuracy(fc_snaive_visitors_train, visitors_test)
##                    ME     RMSE      MAE      MPE      MAPE     MASE      ACF1
## Training set 17.29363 31.15613 26.08775 7.192445 10.285961 1.000000 0.6327669
## Test set     32.87083 50.30097 42.24583 6.640781  9.962647 1.619375 0.5725430
##              Theil's U
## Training set        NA
## Test set     0.6594016
accuracy(fc_BoxCox_stl_ets_visitors_train, visitors_test)
##                      ME     RMSE       MAE         MPE     MAPE      MASE
## Training set  0.5803348 13.36431  9.551391  0.08767744  3.51950 0.3661256
## Test set     76.3637263 84.24658 78.028992 16.87750474 17.51578 2.9910209
##                     ACF1 Theil's U
## Training set -0.05924203        NA
## Test set      0.64749552  1.178154

Judged on test-set RMSE, the seasonal naive method (50.3) beats all four model-based approaches (78.6 to 84.2), so it gives the best forecasts here despite being the simplest.
# Helper with the signature tsCV() expects: ETS forecasts of y for horizon h.
fets <- function(y, h) {
  forecast(ets(y), h = h)
}
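
The helper above is never called in this notebook. A minimal sketch of one way to use it, cross-validating ETS against the seasonal naive method on the visitors series (the h = 4 horizon is an assumption, and the repeated ets() refits make this slow):

# 4-step-ahead cross-validated errors for each method.
e_ets <- tsCV(visitors, fets, h = 4)
e_snaive <- tsCV(visitors, snaive, h = 4)
# Compare mean squared errors; NAs appear where no forecast can be made.
mean(e_ets^2, na.rm = TRUE)
mean(e_snaive^2, na.rm = TRUE)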

13

str(ausbeer)
##  Time-Series [1:218] from 1956 to 2010: 284 213 227 308 262 228 236 320 272 233 ...
head(ausbeer)
##      Qtr1 Qtr2 Qtr3 Qtr4
## 1956  284  213  227  308
## 1957  262  228
plot(bricksq)

plot(dole)

plot(a10)

plot(h02)

bricksq.BC <- BoxCox(bricksq, BoxCox.lambda(bricksq))
a10.BC <- BoxCox(a10, BoxCox.lambda(a10))
ausbeer.train <- window(ausbeer, end = c(2007,2))
ausbeer.test  <- window(ausbeer, start = c(2007,3))
bricksq.BC.train    <- window(bricksq.BC, end = c(1991,1))
bricksq.BC.test    <- window(bricksq.BC, start = c(1991,2))
dole.train <- window(dole, end=c(1989,7))
dole.test <- window(dole, start=c(1989,8))
a10.train <- window(a10.BC, end = c(2005,6))
a10.test <- window(a10.BC, start = c(2005,7))
h02.train <- window(h02, end = c(2005,7))
h02.test <- window(h02, start = c(2005,8))
usmelec.train <- window(usmelec, end = c(2010,6))
usmelec.test <- window(usmelec, start = c(2010,7))

ausbeer.ets <- ets(ausbeer.train)
bricksq.BC.ets <- ets(bricksq.BC.train)
dole.ets <- ets(dole.train)
a10.ets <- ets(a10.train)
h02.ets <- ets(h02.train)
usmelec.ets <- ets(usmelec.train)
ausbeer.etsF <- forecast(ausbeer.ets, h=12)
bricksq.BC.etsF <- forecast(bricksq.BC.ets, h=12)
dole.etsF <- forecast(dole.ets, h=36)
a10.etsF <- forecast(a10.ets, h=36)
h02.etsF <- forecast(h02.ets, h=36)
usmelec.etsF <- forecast(usmelec.ets, h = 36)


ausbeer.sn <- snaive(ausbeer.train, h = 12)
bricksq.BC.sn <- snaive(bricksq.BC.train, h= 12)
dole.sn <- snaive(dole.train, h= 36)
a10.sn <- snaive(a10.train, h = 36)
h02.sn <- snaive(h02.train, h = 36)
usmelec.sn <- snaive(usmelec.train, h= 36)

ausbeer.stlf <- stlf(ausbeer.train, h = 12)
bricksq.BC.stlf <- stlf(bricksq.BC.train, h= 12)
dole.stlf <- stlf(dole.train, h= 36)
a10.stlf <- stlf(a10.train, h = 36)
h02.stlf <- stlf(h02.train, h = 36)
usmelec.stlf <- stlf(usmelec.train, h= 36)
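
All three methods are fitted above but never scored against the held-out data; a sketch of the comparison for one of the series (ausbeer, reusing the objects above):

# Test-set accuracy of ETS, seasonal naive, and STLF on ausbeer.
rbind(ets = accuracy(ausbeer.etsF, ausbeer.test)["Test set", ],
      snaive = accuracy(ausbeer.sn, ausbeer.test)["Test set", ],
      stlf = accuracy(ausbeer.stlf, ausbeer.test)["Test set", ])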

14

autoplot(forecast(ets(bicoal)))

autoplot(forecast(ets(chicken)))

autoplot(forecast(ets(dole)))

autoplot(forecast(ets(usdeaths)))

autoplot(forecast(ets(lynx)))

autoplot(forecast(ets(ibmclose)))

autoplot(forecast(ets(eggs)))

15

# ETS(M,A,M) fitted directly, and via hw() with multiplicative seasonality,
# which uses the same underlying model.
usdeaths.ets2 <- ets(usdeaths, model = "MAM")
usdeaths.ets2F <- forecast(usdeaths.ets2, h = 1)
usdeaths.hwM <- hw(usdeaths, seasonal = 'multiplicative', h = 1)
usdeaths.ets2F$mean
##           Jan
## 1979 8233.107
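
Only the ETS(M,A,M) mean is printed above; to finish the equivalence check one would print the hw() point forecast as well and compare (a sketch):

# The two point forecasts should agree, since hw(seasonal = "multiplicative")
# is based on the same ETS(M,A,M) model.
usdeaths.hwM$mean
all.equal(as.numeric(usdeaths.ets2F$mean), as.numeric(usdeaths.hwM$mean))
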
ibmclose.ets2 <- ets(ibmclose, model = "ANN")
ibmclose.ets2.F <- forecast(ibmclose.ets2,1)
ibmclose.ets2.F
##     Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## 370       356.9995 347.6907 366.3083 342.7629 371.2361
# Hand-check of the interval: for ETS(A,N,N) the h-step standard deviation is
# sigma * sqrt(1 + alpha^2 * (h - 1)). Below, sigma and alpha come from the
# fitted model, and sigma * (1 + alpha^2) is used as a rough stand-in for the
# 1.96 * sigma half-width of the one-step 95% interval.
mysigma <- 7.2637  # sigma of the ETS(A,N,N) fit
alpha <- .9999^2   # alpha^2, with alpha taken from the fit
h <- 2
ci <- mysigma*(1 + alpha*(h - 1))
ibmclose.ets2.F$mean[1]
## [1] 356.9995
ibmclose.ets2.F$mean[1] - ci
## [1] 342.4736
ibmclose.ets2.F$lower[1,'95%']
##      95% 
## 342.7629
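
The hand-rolled width above uses sigma * (1 + alpha^2), roughly 2 * sigma, as a stand-in for the exact 1.96 * sigma one-step half-width. A sketch of the exact calculation, pulling the variance from the sigma2 component of the fitted ets object, which should reproduce the 342.7629 bound shown above:

# Exact one-step 95% lower bound for ETS(A,N,N): the h-step variance
# sigma^2 * (1 + alpha^2 * (h - 1)) reduces to sigma^2 at h = 1.
sigma1 <- sqrt(ibmclose.ets2$sigma2)
ibmclose.ets2.F$mean[1] - 1.96 * sigma1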