Number 1

library(fpp)
books[,1]
## Time Series:
## Start = 1 
## End = 30 
## Frequency = 1 
##  [1] 199 172 111 209 161 119 195 195 131 183 143 141 168 201 155 243 225
## [18] 167 237 202 186 176 232 195 190 182 222 217 188 247
autoplot(books)

# The local minima and maxima are inverted between paperback and hardcover for roughly the first 10 days; after that the two series coincide more closely, with hardcover sales typically higher.
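# To put a number on that co-movement (a sketch; the day-10 change point is
# eyeballed from the plot above, not estimated):
cor(books[, 1], books[, 2])            # overall correlation
cor(books[1:10, 1], books[1:10, 2])    # correlation over the first 10 days
cor(books[11:30, 1], books[11:30, 2])  # correlation over the remainder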

autoplot(books)+
autolayer(ses(books[,1], h = 4), series = "Paper Prediction")+
autolayer(ses(books[,2], h = 4), series = "Hard Prediction")

paper.fit <- ses(books[,1], h = 4)
autoplot(paper.fit)

hard.fit <- ses(books[,2], h = 4)
autoplot(hard.fit)

accuracy(paper.fit) # 33.63769
##                    ME     RMSE     MAE       MPE     MAPE      MASE
## Training set 7.175981 33.63769 27.8431 0.4736071 15.57784 0.7021303
##                    ACF1
## Training set -0.2117522
accuracy(hard.fit) # 31.93101
##                    ME     RMSE      MAE      MPE     MAPE      MASE
## Training set 9.166735 31.93101 26.77319 2.636189 13.39487 0.7987887
##                    ACF1
## Training set -0.1417763
holt.paper <- holt(books[,1], h = 4)
autoplot(holt.paper)

accuracy(holt.paper) # 31.13692
##                     ME     RMSE      MAE       MPE     MAPE      MASE
## Training set -3.717178 31.13692 26.18083 -5.508526 15.58354 0.6602122
##                    ACF1
## Training set -0.1750792
holt.hard <- holt(books[,2], h = 4)
autoplot(holt.hard)

accuracy(holt.hard) # 27.19358
##                      ME     RMSE      MAE       MPE    MAPE      MASE
## Training set -0.1357882 27.19358 23.15557 -2.114792 12.1626 0.6908555
##                     ACF1
## Training set -0.03245186
paper.fit.95 <- 275.35 - 138.87
hard.fit.95 <- 304.34 - 174.78
holt.paper.95 <- 275.02 - 143.913
holt.hard.95 <- 307.42 - 192.92

(paper.fit.95 - holt.paper.95)/paper.fit.95
## [1] 0.03936841
(hard.fit.95 - holt.hard.95)/hard.fit.95
## [1] 0.1162396
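
# The interval widths above were typed in by hand; they can be pulled straight
# from the forecast objects instead (a sketch using the fits above and the
# default 80/95% levels):
paper.fit$upper[1, "95%"] - paper.fit$lower[1, "95%"]
hard.fit$upper[1, "95%"] - hard.fit$lower[1, "95%"]
holt.paper$upper[1, "95%"] - holt.paper$lower[1, "95%"]
holt.hard$upper[1, "95%"] - holt.hard$lower[1, "95%"]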

Number 2

autoplot(visitors)

train <- window(visitors, end = c(2003,4))

test <- window(visitors, start = c(2003,5))

fit <- hw(train, seasonal = "multiplicative")
# The seasonal amplitude grows with the level of the series, so multiplicative seasonality is needed here; if the amplitude stayed constant, additive would be fine.
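# A quick check of that reasoning (sketch): fit both seasonal variants on the
# same training set and compare their training accuracy
accuracy(hw(train, seasonal = "additive"))
accuracy(hw(train, seasonal = "multiplicative"))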

autoplot(fit)

fit1 <- ets(train)
fit1
## ETS(M,Ad,M) 
## 
## Call:
##  ets(y = train) 
## 
##   Smoothing parameters:
##     alpha = 0.6396 
##     beta  = 8e-04 
##     gamma = 1e-04 
##     phi   = 0.9799 
## 
##   Initial states:
##     l = 87.7602 
##     b = 3.0639 
##     s = 0.9392 1.0538 1.0792 0.9675 1.3275 1.091
##            1.0329 0.8867 0.9399 1.0231 0.8488 0.8103
## 
##   sigma:  0.0539
## 
##      AIC     AICc      BIC 
## 2300.157 2303.629 2360.912
fit2 <- ets(train, lambda = "auto", biasadj = TRUE)
fit2
## ETS(A,Ad,A) 
## 
## Call:
##  ets(y = train, lambda = "auto", biasadj = TRUE) 
## 
##   Box-Cox transformation: lambda= 0.3625 
## 
##   Smoothing parameters:
##     alpha = 0.563 
##     beta  = 0.0013 
##     gamma = 0.1726 
##     phi   = 0.98 
## 
##   Initial states:
##     l = 11.3832 
##     b = 0.1187 
##     s = -0.4418 0.3513 0.4399 -0.1824 2.2168 0.8041
##            0.1907 -0.9202 -0.3977 0.0445 -0.9873 -1.1178
## 
##   sigma:  0.3996
## 
##      AIC     AICc      BIC 
## 783.0430 786.5151 843.7981
fit3 <- snaive(train)
fit3
##          Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## May 2003          329.9 289.9718 369.8282 268.8351 390.9649
## Jun 2003          339.4 299.4718 379.3282 278.3351 400.4649
## Jul 2003          418.2 378.2718 458.1282 357.1351 479.2649
## Aug 2003          371.9 331.9718 411.8282 310.8351 432.9649
## Sep 2003          358.6 318.6718 398.5282 297.5351 419.6649
## Oct 2003          428.9 388.9718 468.8282 367.8351 489.9649
## Nov 2003          437.0 397.0718 476.9282 375.9351 498.0649
## Dec 2003          534.0 494.0718 573.9282 472.9351 595.0649
## Jan 2004          396.6 356.6718 436.5282 335.5351 457.6649
## Feb 2004          427.5 387.5718 467.4282 366.4351 488.5649
## Mar 2004          392.5 352.5718 432.4282 331.4351 453.5649
## Apr 2004          321.5 281.5718 361.4282 260.4351 382.5649
## May 2004          329.9 273.4330 386.3670 243.5412 416.2588
## Jun 2004          339.4 282.9330 395.8670 253.0412 425.7588
## Jul 2004          418.2 361.7330 474.6670 331.8412 504.5588
## Aug 2004          371.9 315.4330 428.3670 285.5412 458.2588
## Sep 2004          358.6 302.1330 415.0670 272.2412 444.9588
## Oct 2004          428.9 372.4330 485.3670 342.5412 515.2588
## Nov 2004          437.0 380.5330 493.4670 350.6412 523.3588
## Dec 2004          534.0 477.5330 590.4670 447.6412 620.3588
## Jan 2005          396.6 340.1330 453.0670 310.2412 482.9588
## Feb 2005          427.5 371.0330 483.9670 341.1412 513.8588
## Mar 2005          392.5 336.0330 448.9670 306.1412 478.8588
## Apr 2005          321.5 265.0330 377.9670 235.1412 407.8588
fit4 <- ets(seasadj(stl(BoxCox(train, BoxCox.lambda(train)), s.window = "periodic")),
            biasadj = TRUE, lambda = "auto")
fit4
## ETS(A,A,N) 
## 
## Call:
##  ets(y = seasadj(stl(BoxCox(train, BoxCox.lambda(train)), s.window = "periodic")),
##      lambda = "auto", biasadj = TRUE)
## 
##   Box-Cox transformation: lambda= 1.3027 
## 
##   Smoothing parameters:
##     alpha = 0.5387 
##     beta  = 1e-04 
## 
##   Initial states:
##     l = 17.903 
##     b = 0.096 
## 
##   sigma:  0.9385
## 
##      AIC     AICc      BIC 
## 1139.605 1139.891 1156.482
autoplot(visitors) + autolayer(forecast(fit2, biasadj = TRUE, lambda = BoxCox.lambda(train)), series = "ETS", PI = FALSE)

autoplot(forecast(fit1), series = "ETS")

autoplot(forecast(fit2, biasadj = TRUE, lambda = BoxCox.lambda(train)), series = "ETS Multi")

autoplot(fit3, series = "SNaive")

autoplot(forecast(fit4), series = "STL")

accuracy(forecast(fit1), test) # 14.53, 80.23
##                      ME     RMSE      MAE        MPE      MAPE     MASE
## Training set  0.7640074 14.53480 10.57657  0.1048224  3.994788 0.405423
## Test set     72.1992664 80.23124 74.55285 15.9202832 16.822384 2.857773
##                     ACF1 Theil's U
## Training set -0.05311217        NA
## Test set      0.58716982  1.127269
accuracy(forecast(fit2, biasadj = TRUE, lambda = BoxCox.lambda(train)), test) # 14.96, 77.27
##                      ME     RMSE      MAE         MPE      MAPE      MASE
## Training set  0.7860368 14.95811 10.80897  0.07008069  3.972248 0.4143313
## Test set     68.2019795 77.27352 71.20638 14.88023784 16.003651 2.7294955
##                     ACF1 Theil's U
## Training set -0.02536734        NA
## Test set      0.67232926  1.068379
accuracy(forecast(fit3), test) # 31.15, 50.3
##                    ME     RMSE      MAE      MPE      MAPE     MASE
## Training set 17.29363 31.15613 26.08775 7.192445 10.285961 1.000000
## Test set     32.87083 50.30097 42.24583 6.640781  9.962647 1.619375
##                   ACF1 Theil's U
## Training set 0.6327669        NA
## Test set     0.5725430 0.6594016
accuracy(InvBoxCox(forecast(fit4)$mean, lambda = BoxCox.lambda(train)), test) # 82.74 (test set only)
##                ME     RMSE      MAE      MPE     MAPE      ACF1 Theil's U
## Test set 46.90414 82.73661 65.55577 8.342094 14.68567 0.3659344  1.121733
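# fit4 models the seasonally adjusted, Box-Cox transformed series, so the
# accuracy call above compares deseasonalised forecasts against seasonal data.
# A fairer comparison re-adds the STL seasonal component before inverting the
# transformation (a sketch; it assumes the periodic seasonal pattern simply
# repeats over the 24 test months):
lam <- BoxCox.lambda(train)
seas <- seasonal(stl(BoxCox(train, lam), s.window = "periodic"))
fc4 <- forecast(fit4, h = 24)$mean + rep(tail(seas, 12), 2)
accuracy(InvBoxCox(fc4, lambda = lam), test)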
checkresiduals(fit3)

## 
##  Ljung-Box test
## 
## data:  Residuals from Seasonal naive method
## Q* = 295.02, df = 24, p-value < 2.2e-16
## 
## Model df: 0.   Total lags used: 24
# Snaive gives the best test-set forecast here (lowest test RMSE), although the Ljung-Box test shows the residuals are strongly autocorrelated rather than white noise. The residuals hover around zero with a slight skew to the right.
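# A quick numeric check of that claim (sketch): a mean above the median is
# consistent with a slight right skew in the residuals
summary(as.numeric(residuals(fit3)))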

# Pass h through to forecast() so tsCV evaluates the requested horizon
fit1.fun <- function(x, h) { forecast(ets(x), h = h) }

e1 <- tsCV(visitors, fit1.fun, h = 24)

rmse1 <- sqrt(mean(e1^2, na.rm=TRUE))

fit2.fun <- function(x, h) { forecast(ets(x, lambda = "auto"), h = h, biasadj = TRUE) }
e2 <- tsCV(visitors, fit2.fun, h=24)

rmse2 <- sqrt(mean(e2^2, na.rm=TRUE))

e3 <- tsCV(visitors, snaive, h = 24)

rmse3 <- sqrt(mean(e3^2, na.rm=TRUE))

fit4.fun <- function(x, h) {
  forecast(ets(seasadj(stl(BoxCox(x, BoxCox.lambda(x)), s.window = "periodic")),
               biasadj = TRUE, lambda = "auto"), h = h)
}
e4 <- tsCV(visitors, fit4.fun, h = 24)

rmse4 <- sqrt(mean(e4^2, na.rm=TRUE))
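
# With h = 24, tsCV returns a matrix of errors with one column per horizon, so
# the overall RMSEs below average across all 24 horizons. A per-horizon view
# (sketch):
sqrt(colMeans(e1^2, na.rm = TRUE))
sqrt(colMeans(e3^2, na.rm = TRUE))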

rmse1 # 38.33
## [1] 38.33963
rmse2 # 37.84
## [1] 37.84179
rmse3 # 41.357
## [1] 41.35716
rmse4 # 336.29
## [1] 336.2865

Question 3

fit1 <- ets(bicoal)
autoplot(forecast(fit1))

fit2 <- ets(chicken)
autoplot(forecast(fit2))

fit3 <- ets(dole)
autoplot(forecast(fit3))

# No, it does not always give good forecasts. Note also that ets() itself does not produce a forecast; it only estimates the model parameters, so forecast() must be applied to the fitted model. For the dole data the resulting ETS forecast is poor.

Question 4

autoplot(bricksq)

autoplot(forecast(ets(bricksq), h = 12))

autoplot(snaive(bricksq, h = 12))

autoplot(stlf(bricksq, h = 12))

autoplot(dole)

autoplot(forecast(ets(dole)))

autoplot(snaive(dole))

autoplot(stlf(dole))

autoplot(a10)

autoplot(forecast(ets(a10)))

autoplot(snaive(a10))

autoplot(BoxCox(a10, lambda = "auto"))

autoplot(a10) + 
autolayer(stlf(a10, lambda = "auto"), series = "Forecast")

Question 5

autoplot(ukcars)

cars.decomp <- stl(ukcars, s.window = "periodic", robust = TRUE)

cars.adj <- seasadj(cars.decomp)

autoplot(cars.adj)

fit1 <- stlf(ukcars, etsmodel = "AAN", damped = TRUE, h = 8)
fit2 <- stlf(cars.adj, etsmodel = "AAN", damped = FALSE, h = 8)
fit3 <- ets(ukcars)
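
# fit2 forecasts the seasonally adjusted series; to put those forecasts back on
# the original scale, add the STL seasonal component back in (a sketch; it
# assumes the periodic seasonal pattern simply repeats over the 8 forecast
# quarters)
cars.seas <- tail(seasonal(cars.decomp), 4)
fit2.reseas <- fit2$mean + rep(cars.seas, 2)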

accuracy(fit1) # 23.32
##                    ME     RMSE      MAE        MPE     MAPE     MASE
## Training set 1.551267 23.32113 18.48987 0.04121971 6.042764 0.602576
##                    ACF1
## Training set 0.02262668
accuracy(fit2) # 23.295
##                      ME   RMSE     MAE        MPE     MAPE      MASE
## Training set -0.3412727 23.295 18.1605 -0.5520141 5.884617 0.5918418
##                    ACF1
## Training set 0.02103582
accuracy(fit3) # 25.23
##                    ME     RMSE      MAE        MPE     MAPE      MASE
## Training set 1.313884 25.23244 20.17907 -0.1570979 6.629003 0.6576259
##                    ACF1
## Training set 0.02573334
autoplot(fit1)

autoplot(fit2)

autoplot(forecast(fit3, h = 8))

checkresiduals(fit3)

## 
##  Ljung-Box test
## 
## data:  Residuals from ETS(A,N,A)
## Q* = 18.324, df = 3, p-value = 0.0003772
## 
## Model df: 6.   Total lags used: 9

Question 6

autoplot(eggs)

autoplot(holt(eggs, damped = TRUE))

autoplot(holt(eggs, damped = FALSE))

autoplot(holt(eggs, lambda = "auto", damped = TRUE))

autoplot(holt(eggs, damped = FALSE, lambda = "auto"))

autoplot(holt(eggs, lambda = "auto", damped = TRUE, biasadj = TRUE))

autoplot(holt(eggs, damped = FALSE, lambda = "auto", biasadj = TRUE))

accuracy(holt(eggs, damped = TRUE)) # 26.54
##                     ME     RMSE     MAE       MPE     MAPE      MASE
## Training set -2.891496 26.54019 19.2795 -2.907633 10.01894 0.9510287
##                      ACF1
## Training set -0.003195358
accuracy(holt(eggs, damped = FALSE)) # 26.58
##                      ME     RMSE      MAE       MPE     MAPE      MASE
## Training set 0.04499087 26.58219 19.18491 -1.142201 9.653791 0.9463626
##                    ACF1
## Training set 0.01348202
accuracy(holt(eggs, lambda = "auto", damped = TRUE)) # 26.53
##                      ME     RMSE      MAE       MPE     MAPE      MASE
## Training set -0.8200445 26.53321 19.45654 -2.019718 9.976131 0.9597618
##                     ACF1
## Training set 0.005852382
accuracy(holt(eggs, damped = FALSE, lambda = "auto")) # 26.39
##                     ME     RMSE      MAE       MPE     MAPE      MASE
## Training set 0.7736844 26.39376 18.96387 -1.072416 9.620095 0.9354593
##                    ACF1
## Training set 0.03887152
accuracy(holt(eggs, lambda = "auto", damped = TRUE, biasadj = TRUE)) # 26.58
##                     ME     RMSE      MAE      MPE    MAPE      MASE
## Training set -1.806213 26.58589 19.55896 -2.58425 10.1176 0.9648141
##                     ACF1
## Training set 0.005322063
accuracy(holt(eggs, damped = FALSE, lambda = "auto", biasadj = TRUE)) # 26.386
##                      ME     RMSE      MAE      MPE     MAPE      MASE
## Training set -0.2015298 26.38689 18.99362 -1.63043 9.713172 0.9369265
##                   ACF1
## Training set 0.0383996
# These models produce similar forecasts. The Box-Cox, non-damped, bias-adjusted model has the best RMSE, but only slightly better than the non-bias-adjusted version. If it doesn't negatively impact the model, I would typically use the bias-adjusted fit in order to preserve the usefulness of the prediction intervals. The transformation helps stabilize the variance and bring out the trend.
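
# Collecting the six training RMSEs in one table (sketch) instead of reading
# them off the individual accuracy() calls above
fits <- list(
  damped        = holt(eggs, damped = TRUE),
  plain         = holt(eggs, damped = FALSE),
  bc.damped     = holt(eggs, lambda = "auto", damped = TRUE),
  bc.plain      = holt(eggs, lambda = "auto", damped = FALSE),
  bc.damped.adj = holt(eggs, lambda = "auto", damped = TRUE, biasadj = TRUE),
  bc.plain.adj  = holt(eggs, lambda = "auto", damped = FALSE, biasadj = TRUE)
)
sapply(fits, function(f) accuracy(f)["Training set", "RMSE"])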