Develop various forecast models for Idly, Continental Breakfast and Omelette, and identify the best forecasting model using MAPE (mean absolute percentage error). Loading the fpp2 package:
library(fpp2)
Reading the main CSV file:
bf.csv <- read.csv("Breakfast.csv",header=TRUE)
Let's look at the structure:
str(bf.csv)
## 'data.frame': 115 obs. of 9 variables:
## $ Date : Factor w/ 115 levels "01-01-2013","01-10-2012",..: 2 6 10 14 18 22 26 30 34 38 ...
## $ BKFST_OCCUP : int 250 236 244 239 221 211 208 219 229 220 ...
## $ Idly : int 40 65 59 65 70 63 67 63 62 55 ...
## $ Dosa : int 30 33 21 42 26 44 28 27 29 24 ...
## $ Chutney : int 95 120 119 140 126 135 120 120 130 135 ...
## $ Sambar : int 95 120 120 150 126 135 120 120 130 135 ...
## $ Continental.B.F : int 25 25 25 35 41 30 40 40 40 40 ...
## $ North.Indian.B.F: int 6 2 2 5 3 3 4 4 5 2 ...
## $ Omellette : int 15 7 8 10 13 5 12 12 13 13 ...
Let's build forecasting models for Idly first. Extract the Idly column:
Idly.csv <- bf.csv[,"Idly"]
str(Idly.csv)
## int [1:115] 40 65 59 65 70 63 67 63 62 55 ...
Let's convert it into a time series, assuming weekly seasonality (frequency = 7):
Idly.ts <- ts(Idly.csv,frequency=7)
str(Idly.ts)
## Time-Series [1:115] from 1 to 17.3: 40 65 59 65 70 63 67 63 62 55 ...
Visualizing it
autoplot(Idly.ts)+
ggtitle("Number of People Ordering Idly") +
xlab("Week") +
ylab("Number of People")
Let's draw a regression line against time:
New.time <- time(Idly.ts)
plot(Idly.ts)
abline(reg=lm(Idly.ts~New.time))
So there is a negative trend over time. Let's decompose the series:
plot(decompose(Idly.ts))
Decomposition assuming a multiplicative time series:
plot(decompose(Idly.ts,type="multiplicative"))
The additive and multiplicative decomposition charts look very similar.
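The two can also be compared numerically through their estimated seasonal indices; a minimal sketch:
# Day-of-week seasonal indices: additive offsets versus multiplicative factors
round(decompose(Idly.ts)$figure, 2)
round(decompose(Idly.ts, type = "multiplicative")$figure, 3)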
Let's examine the autocorrelation:
acf(Idly.ts)
The ACF suggests that the series is not stationary. Let's check the PACF:
pacf(Idly.ts)
A single significant spike suggests an AR(1) process. Let's look at the ACF of the random component from the decomposition:
Idly.ts.decom <- decompose(Idly.ts)
acf(Idly.ts.decom$random,na.action = na.pass)
Let's check its PACF:
pacf(Idly.ts.decom$random,na.action = na.pass)
The two significant spikes in this PACF suggest that a moving-average component is present as well.
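As a quick sanity check on that reading, an AR model can be fitted directly with forecast::Arima; a sketch (the automatic ARIMA search further below performs this selection more systematically):
# Fit a plain AR(1) to the Idly series and inspect the coefficient and AIC
Arima(Idly.ts, order = c(1, 0, 0))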
Let's check whether the seasonal adjustment has been effective. Before that, we need to find which observations became NA after decomposition (the centred moving-average trend loses the first and last few points):
which(is.na(Idly.ts.decom$random))
## [1] 1 2 3 113 114 115
Now exclude them and compute the standard deviation of the raw series:
sd(Idly.ts[4:112])
## [1] 6.77076
Now let's see the standard deviation after removing the trend:
sd(Idly.ts[4:112]-Idly.ts.decom$trend[4:112])
## [1] 4.661139
So detrending has been effective. Let's see the standard deviation of the random component, i.e. after removing the seasonality as well:
sd(Idly.ts.decom$random[4:112])
## [1] 4.561921
It has not reduced much further, so the seasonal adjustment has not been very effective. Now split the series: keep everything up to week 12, day 5 for training and hold out the remaining observations for testing.
Idlytr.ts <- window(Idly.ts,end=c(12,5))
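A quick check of the split sizes (the series has 115 observations, so cutting at week 12, day 5 leaves 33 points for testing, matching the forecast horizon used below):
length(Idlytr.ts)                    # training observations: 82
length(Idly.ts) - length(Idlytr.ts)  # test observations: 33
Now fit a simple exponential smoothing (SES) model with horizon h = 33: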
Idly.ses <- ses(Idlytr.ts,h=33)
Idly.ses
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## 12.71429 51.84935 44.72275 58.97596 40.95015 62.74855
## 12.85714 51.84935 44.48346 59.21524 40.58420 63.11451
## 13.00000 51.84935 44.25171 59.44699 40.22976 63.46894
## 13.14286 51.84935 44.02682 59.67188 39.88582 63.81288
## 13.28571 51.84935 43.80822 59.89049 39.55150 64.14721
## 13.42857 51.84935 43.59540 60.10330 39.22602 64.47268
## 13.57143 51.84935 43.38794 60.31077 38.90873 64.78997
## 13.71429 51.84935 43.18544 60.51326 38.59904 65.09966
## 13.85714 51.84935 42.98757 60.71114 38.29642 65.40228
## 14.00000 51.84935 42.79402 60.90468 38.00041 65.69829
## 14.14286 51.84935 42.60452 61.09418 37.71060 65.98810
## 14.28571 51.84935 42.41883 61.27987 37.42661 66.27209
## 14.42857 51.84935 42.23673 61.46198 37.14811 66.55059
## 14.57143 51.84935 42.05801 61.64070 36.87478 66.82392
## 14.71429 51.84935 41.88249 61.81621 36.60636 67.09235
## 14.85714 51.84935 41.71002 61.98869 36.34258 67.35613
## 15.00000 51.84935 41.54043 62.15828 36.08321 67.61549
## 15.14286 51.84935 41.37358 62.32512 35.82804 67.87066
## 15.28571 51.84935 41.20935 62.48935 35.57687 68.12183
## 15.42857 51.84935 41.04762 62.65109 35.32952 68.36918
## 15.57143 51.84935 40.88827 62.81043 35.08582 68.61288
## 15.71429 51.84935 40.73121 62.96750 34.84562 68.85309
## 15.85714 51.84935 40.57633 63.12237 34.60875 69.08995
## 16.00000 51.84935 40.42355 63.27515 34.37510 69.32360
## 16.14286 51.84935 40.27279 63.42591 34.14454 69.55417
## 16.28571 51.84935 40.12397 63.57473 33.91693 69.78177
## 16.42857 51.84935 39.97702 63.72169 33.69218 70.00652
## 16.57143 51.84935 39.83186 63.86685 33.47018 70.22853
## 16.71429 51.84935 39.68843 64.01027 33.25083 70.44788
## 16.85714 51.84935 39.54667 64.15203 33.03403 70.66467
## 17.00000 51.84935 39.40653 64.29217 32.81970 70.87900
## 17.14286 51.84935 39.26795 64.43075 32.60776 71.09094
## 17.28571 51.84935 39.13088 64.56782 32.39813 71.30057
Idly.ses[["model"]]
## Simple exponential smoothing
##
## Call:
## ses(y = Idlytr.ts, h = 33)
##
## Smoothing parameters:
## alpha = 0.2613
##
## Initial states:
## l = 57.442
##
## sigma: 5.5609
##
## AIC AICc BIC
## 646.7114 647.0191 653.9315
autoplot(Idly.ts) +
autolayer(Idly.ses,PI=FALSE)+
autolayer(fitted(Idly.ses), series="Fitted") +
ylab("Number of Persons Ordering Idly") + xlab("Week")
Let's check the accuracy of the forecast against the held-out test window:
test.ses <- window(Idly.ts,start=c(12,6))
accuracy(Idly.ses,test.ses)
## ME RMSE MAE MPE MAPE MASE
## Training set -0.2610105 5.492684 3.930575 -1.334975 6.992334 0.7882168
## Test set 6.7567089 10.094150 8.501849 9.952551 13.799975 1.7049163
## ACF1 Theil's U
## Training set 0.04055495 NA
## Test set 0.31046109 1.112766
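As a quick cross-check, the test-set MAPE can also be computed by hand from the forecast object; a minimal sketch using the objects created above (mape is a new helper name):
# Mean absolute percentage error of the SES point forecasts over the 33 test points
mape <- function(actual, predicted) mean(abs((actual - predicted) / actual)) * 100
mape(as.numeric(test.ses), as.numeric(Idly.ses$mean))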
So the MAPE comes out to about 13.8% on the test data. Next, let's apply Holt's linear trend method (double exponential smoothing):
Idly.des <- holt(Idlytr.ts,h=33)
Idly.des
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## 12.71429 51.53010 44.32080 58.73941 40.50442 62.55579
## 12.85714 51.45467 44.02248 58.88686 40.08811 62.82122
## 13.00000 51.37923 43.73048 59.02798 39.68148 63.07698
## 13.14286 51.30379 43.44428 59.16330 39.28371 63.32387
## 13.28571 51.22835 43.16343 59.29328 38.89412 63.56259
## 13.42857 51.15291 42.88752 59.41831 38.51209 63.79374
## 13.57143 51.07748 42.61621 59.53874 38.13708 64.01787
## 13.71429 51.00204 42.34918 59.65490 37.76863 64.23545
## 13.85714 50.92660 42.08615 59.76705 37.40630 64.44690
## 14.00000 50.85116 41.82688 59.87545 37.04971 64.65262
## 14.14286 50.77572 41.57113 59.98032 36.69851 64.85294
## 14.28571 50.70029 41.31871 60.08186 36.35241 65.04817
## 14.42857 50.62485 41.06943 60.18026 36.01110 65.23859
## 14.57143 50.54941 40.82313 60.27569 35.67434 65.42448
## 14.71429 50.47397 40.57964 60.36831 35.34189 65.60605
## 14.85714 50.39853 40.33883 60.45824 35.01354 65.78353
## 15.00000 50.32310 40.10056 60.54563 34.68908 65.95711
## 15.14286 50.24766 39.86472 60.63059 34.36833 66.12699
## 15.28571 50.17222 39.63120 60.71324 34.05113 66.29332
## 15.42857 50.09678 39.39990 60.79367 33.73730 66.45626
## 15.57143 50.02134 39.17070 60.87199 33.42672 66.61597
## 15.71429 49.94591 38.94354 60.94827 33.11924 66.77257
## 15.85714 49.87047 38.71833 61.02261 32.81474 66.92620
## 16.00000 49.79503 38.49498 61.09508 32.51310 67.07696
## 16.14286 49.71959 38.27343 61.16575 32.21420 67.22498
## 16.28571 49.64416 38.05361 61.23470 31.91795 67.37036
## 16.42857 49.56872 37.83546 61.30198 31.62424 67.51319
## 16.57143 49.49328 37.61890 61.36766 31.33299 67.65357
## 16.71429 49.41784 37.40390 61.43178 31.04410 67.79158
## 16.85714 49.34240 37.19039 61.49442 30.75750 67.92731
## 17.00000 49.26697 36.97832 61.55561 30.47311 68.06082
## 17.14286 49.19153 36.76765 61.61540 30.19085 68.19220
## 17.28571 49.11609 36.55833 61.67384 29.91066 68.32152
Idly.des[["model"]]
## Holt's method
##
## Call:
## holt(y = Idlytr.ts, h = 33)
##
## Smoothing parameters:
## alpha = 0.2505
## beta = 1e-04
##
## Initial states:
## l = 57.5951
## b = -0.0755
##
## sigma: 5.6255
##
## AIC AICc BIC
## 650.5276 651.3171 662.5612
autoplot(Idly.ts) +
autolayer(Idly.des,PI=FALSE)+
autolayer(fitted(Idly.des), series="Fitted") +
ylab("Number of Persons Ordering Idly") + xlab("Week")
Checking the accuracy on the same test window:
test.des <- window(Idly.ts,start=c(12,6))
accuracy(Idly.des,test.des)
## ME RMSE MAE MPE MAPE MASE
## Training set 0.001674603 5.486532 3.945749 -0.8957688 6.989874 0.7912599
## Test set 8.282963788 11.318770 9.484698 12.5665128 15.227549 1.9020115
## ACF1 Theil's U
## Training set 0.04735655 NA
## Test set 0.35146288 1.256779
Holt's method does worse on the test set (MAPE about 15.2%). Let's try Holt-Winters' seasonal methods, both additive and multiplicative:
Idly.tes.a <- hw(Idlytr.ts,seasonal="additive",h=33)
Idly.tes.m <- hw(Idlytr.ts,seasonal="multiplicative",h=33)
Idly.tes.m
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## 12.71429 50.73270 44.47146 56.99394 41.15696 60.30844
## 12.85714 50.41749 44.13998 56.69499 40.81687 60.01810
## 13.00000 48.27785 42.21399 54.34171 39.00398 57.55172
## 13.14286 49.79946 43.49011 56.10881 40.15014 59.44877
## 13.28571 50.02487 43.63240 56.41734 40.24842 59.80131
## 13.42857 50.82405 44.27406 57.37404 40.80670 60.84140
## 13.57143 52.04553 45.28143 58.80963 41.70073 62.39033
## 13.71429 49.68010 43.16924 56.19095 39.72260 59.63759
## 13.85714 49.36831 42.84460 55.89202 39.39116 59.34547
## 14.00000 47.27020 40.97233 53.56808 37.63843 56.90197
## 14.14286 48.75695 42.20797 55.30592 38.74115 58.77274
## 14.28571 48.97450 42.34305 55.60594 38.83257 59.11642
## 14.42857 49.75369 42.96263 56.54474 39.36767 60.13970
## 14.57143 50.94614 43.93693 57.95535 40.22648 61.66580
## 14.71429 48.62749 41.88431 55.37068 38.31468 58.94030
## 14.85714 48.31914 41.56615 55.07212 37.99134 58.64694
## 15.00000 46.26256 39.74665 52.77846 36.29734 56.22777
## 15.14286 47.71443 40.94209 54.48677 37.35703 58.07183
## 15.28571 47.92412 41.06982 54.77843 37.44137 58.40688
## 15.42857 48.68332 41.66737 55.69927 37.95335 59.41329
## 15.57143 49.84674 42.60876 57.08472 38.77720 60.91628
## 15.71429 47.57489 40.61476 54.53501 36.93030 58.21947
## 15.85714 47.26996 40.30280 54.23712 36.61461 57.92531
## 16.00000 45.25491 38.53526 51.97457 34.97809 55.53174
## 16.14286 46.67192 39.69077 53.65308 35.99517 57.34868
## 16.28571 46.87375 39.81103 53.93647 36.07225 57.67525
## 16.42857 47.61296 40.38660 54.83932 36.56120 58.66472
## 16.57143 48.74734 41.29525 56.19944 37.35035 60.14434
## 16.71429 46.52228 39.35905 53.68551 35.56707 57.47749
## 16.85714 46.22079 39.05304 53.38853 35.25867 57.18291
## 17.00000 44.24727 37.33673 51.15781 33.67851 54.81603
## 17.14286 45.62941 38.45256 52.80626 34.65336 56.60546
## 17.28571 45.82338 38.56527 53.08149 34.72305 56.92370
Idly.tes.a[["model"]]
## Holt-Winters' additive method
##
## Call:
## hw(y = Idlytr.ts, h = 33, seasonal = "additive")
##
## Smoothing parameters:
## alpha = 0.1795
## beta = 1e-04
## gamma = 1e-04
##
## Initial states:
## l = 63.7758
## b = -0.1322
## s = -0.057 0.6982 1.7827 0.5568 -0.3835 -1.1747
## -1.4225
##
## sigma: 5.8337
##
## AIC AICc BIC
## 662.7787 667.3004 691.6593
Idly.tes.m[["model"]]
## Holt-Winters' multiplicative method
##
## Call:
## hw(y = Idlytr.ts, h = 33, seasonal = "multiplicative")
##
## Smoothing parameters:
## alpha = 0.1323
## beta = 1e-04
## gamma = 1e-04
##
## Initial states:
## l = 64.77
## b = -0.1492
## s = 0.9962 0.9995 1.0439 1.0163 0.9974 0.9899
## 0.9568
##
## sigma: 0.0963
##
## AIC AICc BIC
## 660.0089 664.5306 688.8895
autoplot(Idly.ts) +
autolayer(Idly.tes.a,PI=FALSE)+
autolayer(fitted(Idly.tes.a), series="Fitted") +
ylab("Number of Persons Ordering Idly") + xlab("Week")
autoplot(Idly.ts) +
autolayer(Idly.tes.m,PI=FALSE)+
autolayer(fitted(Idly.tes.m), series="Fitted") +
ylab("Number of Persons Ordering Idly") + xlab("Week")
Accuracy of the additive model:
test.tes.a <- window(Idly.ts,start=c(12,6))
accuracy(Idly.tes.a,test.tes.a)
## ME RMSE MAE MPE MAPE MASE
## Training set -0.1349717 5.428335 3.84049 -1.177243 6.873018 0.7701518
## Test set 9.9335697 12.817322 10.74608 15.369212 17.194018 2.1549619
## ACF1 Theil's U
## Training set 0.05247843 NA
## Test set 0.37741909 1.433086
Accuracy of the multiplicative model:
test.tes.m <- window(Idly.ts,start=c(12,6))
accuracy(Idly.tes.m,test.tes.m)
## ME RMSE MAE MPE MAPE MASE
## Training set -0.1649576 5.503636 4.052919 -1.266474 7.238209 0.8127511
## Test set 10.3424701 13.103458 11.055588 16.097713 17.709870 2.2170296
## ACF1 Theil's U
## Training set 0.1060432 NA
## Test set 0.3845178 1.468793
The Holt-Winters models do even worse on the test data. Let's compare some simple benchmark methods: mean, naive, seasonal naive, and naive with drift.
Idly.meanf <- meanf(Idlytr.ts, h=33)
Idly.Naive <- naive(Idlytr.ts, h=33)
Idly.snaive <- snaive(Idlytr.ts, h=33)
Idly.rwf <- rwf(Idlytr.ts, h=33, drift=TRUE)
autoplot(Idlytr.ts) +
autolayer(meanf(Idlytr.ts, h=33),
series="Mean", PI=FALSE) +
autolayer(naive(Idlytr.ts, h=33),
series="Naïve", PI=FALSE) +
autolayer(snaive(Idlytr.ts, h=33),
series="Seasonal naïve", PI=FALSE) +
autolayer(rwf(Idlytr.ts, h=33, drift=TRUE),
series="Naive with Drift",PI=FALSE)+
ggtitle("Forecast of Number of People Ordering Idly") +
xlab("Year") + ylab("Number") +
guides(colour=guide_legend(title="Forecast"))
Checking the accuracy of each benchmark method on the test window:
Idly.tes.sm <- window(Idly.ts,start=c(12,6))
accuracy(Idly.meanf,Idly.tes.sm)
## ME RMSE MAE MPE MAPE MASE
## Training set -2.253448e-15 6.775485 5.831648 -1.447504 10.35194 1.169448
## Test set -9.305248e-01 7.556760 6.433851 -3.397968 11.70766 1.290211
## ACF1 Theil's U
## Training set 0.4941441 NA
## Test set 0.3104611 0.8375994
accuracy(Idly.Naive,Idly.tes.sm)
## ME RMSE MAE MPE MAPE MASE
## Training set 0.1481481 6.450189 4.592593 -0.3399679 7.858181 0.9209745
## Test set 6.6060606 9.993938 8.424242 9.6909185 13.690394 1.6893534
## ACF1 Theil's U
## Training set -0.3614787 NA
## Test set 0.3104611 1.101115
accuracy(Idly.snaive,Idly.tes.sm)
## ME RMSE MAE MPE MAPE MASE
## Training set -0.9333333 7.397297 4.986667 -2.426169 8.831738 1.000000
## Test set 7.4545455 10.511177 8.787879 11.221922 14.218642 1.762275
## ACF1 Theil's U
## Training set 0.2451623 NA
## Test set 0.3299251 1.174489
accuracy(Idly.rwf,Idly.tes.sm)
## ME RMSE MAE MPE MAPE
## Training set -4.210902e-15 6.448487 4.605396 -0.5909493 7.891637
## Test set 4.087542e+00 8.338557 6.859708 5.3890580 11.457487
## MASE ACF1 Theil's U
## Training set 0.9235419 -0.3614787 NA
## Test set 1.3756099 0.2537866 0.9037068
The drift method gives a test MAPE of about 11.5% and the mean method about 11.7%, better than the exponential smoothing models. Let's check whether a Box-Cox transformation helps:
(lambda <- BoxCox.lambda(Idly.ts))
## [1] 1.999924
autoplot(BoxCox(Idly.ts,lambda))
The transformed series shows no real change, so the transformation is not helping.
Let's difference the series and see:
plot(diff(Idlytr.ts))
Let's apply the KPSS unit root test to see whether differencing is required:
library(urca)
Idlytr.ts %>% ur.kpss() %>% summary()
##
## #######################
## # KPSS Unit Root Test #
## #######################
##
## Test is of type: mu with 3 lags.
##
## Value of test-statistic is: 1.0638
##
## Critical value for a significance level of:
## 10pct 5pct 2.5pct 1pct
## critical values 0.347 0.463 0.574 0.739
Here the test statistic (1.06) exceeds even the 1% critical value, so differencing is required. Let's see the value after differencing:
Idlytr.ts %>% diff() %>% ur.kpss() %>% summary()
##
## #######################
## # KPSS Unit Root Test #
## #######################
##
## Test is of type: mu with 3 lags.
##
## Value of test-statistic is: 0.1985
##
## Critical value for a significance level of:
## 10pct 5pct 2.5pct 1pct
## critical values 0.347 0.463 0.574 0.739
Now the test statistic is well below all the critical values, so a single ordinary difference is sufficient.
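As a cross-check, ndiffs() applies the same KPSS-based procedure automatically and should also point to a single difference here; a minimal sketch:
# ndiffs() repeats the unit-root test on successive differences and returns the number required
Idlytr.ts %>% ndiffs()
Let's also check whether seasonal differencing is required: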
Idlytr.ts %>% nsdiffs()
## [1] 0
So seasonal differencing is not required. Let's fit an automatic ARIMA (non-seasonal, with a full search rather than the default stepwise one):
(Idly.aa <- auto.arima(Idlytr.ts, seasonal=FALSE,
stepwise=FALSE, approximation=FALSE))
## Series: Idlytr.ts
## ARIMA(0,1,1)
##
## Coefficients:
## ma1
## -0.7244
## s.e. 0.0975
##
## sigma^2 estimated as 30.93: log likelihood=-253.79
## AIC=511.58 AICc=511.74 BIC=516.37
So it is an IMA(1,1) process; note that an ARIMA(0,1,1) model without drift corresponds to simple exponential smoothing. Plotting the forecasts:
autoplot(forecast(Idly.aa))
Idly.tes.aa <- window(Idly.ts,start=c(12,6))
accuracy(forecast(Idly.aa),Idly.tes.aa)
## ME RMSE MAE MPE MAPE MASE
## Training set 0.1122234 5.493451 3.861200 -0.5406289 6.680714 0.7743047
## Test set 4.9823395 9.408378 6.589314 7.0910360 10.304984 1.3213865
## ACF1 Theil's U
## Training set 0.1613833 NA
## Test set 0.6412780 1.452244
We get a test MAPE of about 10.3%, the best result so far.
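Since the aim is to pick the best model by MAPE, it helps to gather the test-set MAPEs computed above into a single table; a minimal sketch using the objects fitted earlier (mape.summary is a new, hypothetical name):
# Collect the test-set MAPE of every model evaluated so far, lowest first
mape.summary <- data.frame(
  Model = c("SES", "Holt", "HW additive", "HW multiplicative",
            "Mean", "Naive", "Seasonal naive", "Drift", "ARIMA(0,1,1)"),
  Test.MAPE = c(accuracy(Idly.ses, test.ses)["Test set", "MAPE"],
                accuracy(Idly.des, test.des)["Test set", "MAPE"],
                accuracy(Idly.tes.a, test.tes.a)["Test set", "MAPE"],
                accuracy(Idly.tes.m, test.tes.m)["Test set", "MAPE"],
                accuracy(Idly.meanf, Idly.tes.sm)["Test set", "MAPE"],
                accuracy(Idly.Naive, Idly.tes.sm)["Test set", "MAPE"],
                accuracy(Idly.snaive, Idly.tes.sm)["Test set", "MAPE"],
                accuracy(Idly.rwf, Idly.tes.sm)["Test set", "MAPE"],
                accuracy(forecast(Idly.aa), Idly.tes.aa)["Test set", "MAPE"]))
mape.summary[order(mape.summary$Test.MAPE), ]
Based on the figures reported above, the ARIMA(0,1,1) has the lowest test-set MAPE. Finally, let's also try time-series regression models with tslm, starting with trend plus season: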
fit.Idlytr.ts <- tslm(Idlytr.ts ~ trend + season)
summary(fit.Idlytr.ts)
##
## Call:
## tslm(formula = Idlytr.ts ~ trend + season)
##
## Residuals:
## Min 1Q Median 3Q Max
## -23.5098 -3.2130 0.3654 3.5865 11.4969
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 63.65724 2.06250 30.864 < 2e-16 ***
## trend -0.14744 0.02812 -5.243 1.45e-06 ***
## season2 1.14744 2.45773 0.467 0.642
## season3 1.29488 2.45821 0.527 0.600
## season4 2.69232 2.45901 1.095 0.277
## season5 3.67310 2.46014 1.493 0.140
## season6 3.02419 2.51315 1.203 0.233
## season7 2.26254 2.51378 0.900 0.371
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 6.02 on 74 degrees of freedom
## Multiple R-squared: 0.2876, Adjusted R-squared: 0.2203
## F-statistic: 4.269 on 7 and 74 DF, p-value: 0.0005217
None of the seasonal dummies is significant, so let's consider only the trend:
fit.Idlytr.ts <- tslm(Idlytr.ts ~ trend)
summary(fit.Idlytr.ts)
##
## Call:
## tslm(formula = Idlytr.ts ~ trend)
##
## Residuals:
## Min 1Q Median 3Q Max
## -25.418 -3.915 0.173 3.822 12.150
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 65.56278 1.31767 49.756 < 2e-16 ***
## trend -0.14521 0.02758 -5.265 1.15e-06 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 5.912 on 80 degrees of freedom
## Multiple R-squared: 0.2573, Adjusted R-squared: 0.248
## F-statistic: 27.72 on 1 and 80 DF, p-value: 1.152e-06
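Because the trend-only model is nested in the trend-plus-season model, an F-test makes the comparison explicit; a minimal sketch (refitting under new, hypothetical names, since fit.Idlytr.ts has been overwritten):
# Do the six seasonal dummies jointly improve the fit? A large p-value says no
fit.trend.season <- tslm(Idlytr.ts ~ trend + season)
fit.trend.only <- tslm(Idlytr.ts ~ trend)
anova(fit.trend.only, fit.trend.season)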
Trying non-linear trend regressions: exponential, piecewise linear, and cubic spline.
h <- 10
fit.lin <- tslm(Idlytr.ts ~ trend)
fcasts.lin <- forecast(fit.lin, h = h)
fit.exp <- tslm(Idlytr.ts ~ trend, lambda = 0)
fcasts.exp <- forecast(fit.exp, h = h)
t <- time(Idlytr.ts)
# NOTE: time(Idlytr.ts) runs on the week scale (roughly 1 to 12.6), so breakpoints specified as
# observation indices (69 and 95) lie outside its range. tb1 and tb2 then collapse to all zeros,
# which is what triggers the rank-deficient-fit warnings below; on this scale the knots should be
# chosen within about 1-12.6 (for example t.break1 <- 5 and t.break2 <- 9, hypothetical values).
t.break1 <- 69
t.break2 <- 95
tb1 <- ts(pmax(0, t - t.break1), start = 1)
tb2 <- ts(pmax(0, t - t.break2), start = 1)
fit.pw <- tslm(Idlytr.ts ~ t + tb1 + tb2)
t.new <- t[length(t)] + seq(h)
tb1.new <- tb1[length(tb1)] + seq(h)
tb2.new <- tb2[length(tb2)] + seq(h)
newdata <- cbind(t=t.new, tb1=tb1.new, tb2=tb2.new) %>%
as.data.frame()
fcasts.pw <- forecast(fit.pw, newdata = newdata)
## Warning in predict.lm(predict_object, newdata = newdata, se.fit = TRUE, :
## prediction from a rank-deficient fit may be misleading
## Warning in predict.lm(predict_object, newdata = newdata, se.fit = TRUE, :
## prediction from a rank-deficient fit may be misleading
fit.spline <- tslm(Idlytr.ts ~ t + I(t^2) + I(t^3) +
I(tb1^3) + I(tb2^3))
fcasts.spl <- forecast(fit.spline, newdata = newdata)
## Warning in predict.lm(predict_object, newdata = newdata, se.fit = TRUE, :
## prediction from a rank-deficient fit may be misleading
## Warning in predict.lm(predict_object, newdata = newdata, se.fit = TRUE, :
## prediction from a rank-deficient fit may be misleading
autoplot(Idlytr.ts) +
autolayer(fitted(fit.lin), series = "Linear") +
autolayer(fitted(fit.exp), series = "Exponential") +
autolayer(fitted(fit.pw), series = "Piecewise") +
autolayer(fitted(fit.spline), series = "Cubic Spline") +
autolayer(fcasts.pw, series="Piecewise") +
autolayer(fcasts.lin, series="Linear", PI=FALSE) +
autolayer(fcasts.exp, series="Exponential", PI=FALSE) +
autolayer(fcasts.spl, series="Cubic Spline", PI=FALSE) +
xlab("week") + ylab("Idly Orders") +
ggtitle("Idly Orders") +
guides(colour = guide_legend(title = " "))
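To bring these regression models into the same MAPE comparison, their forecasts need to be scored on the held-out 33-point window used earlier; a minimal sketch for the linear trend model (fcasts.lin.test is a new, hypothetical name), with the other fits handled the same way:
# Forecast the full test horizon from the linear trend fit and extract its test-set MAPE
fcasts.lin.test <- forecast(fit.lin, h = 33)
accuracy(fcasts.lin.test, window(Idly.ts, start = c(12, 6)))["Test set", "MAPE"]
Whichever model ends up with the lowest test-set MAPE would be the one chosen for Idly; among the models evaluated above, that is the ARIMA(0,1,1) at roughly 10.3%.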