Read data and find Top 10 Companies
library(readr)
SPstocks <- read_csv("C:/Users/buffe/Documents/all_SPstocks_5yr.csv")
## Parsed with column specification:
## cols(
## date = col_date(format = ""),
## open = col_double(),
## high = col_double(),
## low = col_double(),
## close = col_double(),
## volume = col_integer(),
## Name = col_character()
## )
View(SPstocks)
top10 <- SPstocks[SPstocks$Name %in% c("JPM", "BRK.B", "BAC", "WFC", "AAPL", "XOM", "T", "VZ", "MSFT", "CVX"), ]
table(top10$Name)
##
## AAPL BAC BRK.B CVX JPM MSFT T VZ WFC XOM
## 1259 1259 1259 1259 1259 1259 1259 1259 1259 1259
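The ten tickers above are hardcoded. As a rough sketch (assuming average daily share volume is a reasonable ranking criterion, which the original does not state), a data-driven shortlist could be pulled straight from the data:
avg_vol <- tapply(SPstocks$volume, SPstocks$Name, mean, na.rm = TRUE) # mean daily volume per ticker
head(sort(avg_vol, decreasing = TRUE), 10) # the ten most heavily traded names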
Time Series for each company's stock
#Company 1 JP Morgan
library(fpp)
## Warning: package 'fpp' was built under R version 3.4.4
## Loading required package: forecast
## Warning: package 'forecast' was built under R version 3.4.4
## Loading required package: fma
## Warning: package 'fma' was built under R version 3.4.4
## Loading required package: expsmooth
## Warning: package 'expsmooth' was built under R version 3.4.4
## Loading required package: lmtest
## Warning: package 'lmtest' was built under R version 3.4.4
## Loading required package: zoo
## Warning: package 'zoo' was built under R version 3.4.4
##
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
##
## as.Date, as.Date.numeric
## Loading required package: tseries
## Warning: package 'tseries' was built under R version 3.4.4
jp <- top10[top10$Name == "JPM",]
jp.ts <- ts(jp$open, start = c(2013,30), frequency = (5/7)*365.25 - 6 - 3*(5/7))
autoplot(jp.ts)
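The frequency passed to ts() is a trading-day count. A minimal sketch of the arithmetic, under my reading of the formula (the original does not spell it out):
# (5/7) * 365.25 approximates weekdays per year averaged over leap years (~260.9);
# subtracting 6 fixed holidays, plus 3 more prorated by 5/7 for weekend overlap:
(5/7) * 365.25 - 6 - 3 * (5/7) # evaluates to 252.75, roughly one trading year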
#Company 2 Berkshire Hathaway
brk <- top10[top10$Name == "BRK.B",]
brk.ts <- ts(brk$open, start = c(2013,30), frequency = (5/7)*365.25 - 6 - 3*(5/7))
autoplot(brk.ts)
#Company 3 Bank of America
bac <- top10[top10$Name == "BAC",]
bac.ts <- ts(bac$open, start = c(2013,30), frequency = (5/7)*365.25 - 6 - 3*(5/7))
autoplot(bac.ts)
#Company 4 Wells Fargo
wfc <- top10[top10$Name == "WFC",]
wfc.ts <- ts(wfc$open, start = c(2013,30), frequency = (5/7)*365.25 - 6 - 3*(5/7))
autoplot(wfc.ts)
#Company 5 Apple
aapl <- top10[top10$Name == "AAPL",]
aapl.ts <- ts(aapl$open, start = c(2013,30), frequency = (5/7)*365.25 - 6 - 3*(5/7))
autoplot(aapl.ts)
#Company 6 Exxon Mobil
xom <- top10[top10$Name == "XOM",]
xom.ts <- ts(xom$open, start = c(2013,30), frequency = (5/7)*365.25 - 6 - 3*(5/7))
autoplot(xom.ts)
#Company 7 AT&T
at <- top10[top10$Name == "T",]
at.ts <- ts(at$open, start = c(2013,30), frequency = (5/7)*365.25 - 6 - 3*(5/7))
autoplot(at.ts)
#Company 8 Verizon
vz <- top10[top10$Name == "VZ",]
vz.ts <- ts(vz$open, start = c(2013,30), frequency = (5/7)*365.25 - 6 - 3*(5/7))
autoplot(vz.ts)
#Company 9 Microsoft
mfst <- top10[top10$Name == "MSFT",]
mfst.ts <- ts(mfst$open, start = c(2013,30), frequency = (5/7)*365.25 - 6 - 3*(5/7))
autoplot(mfst.ts)
#Company 10 Chevron
cvx <- top10[top10$Name == "CVX",]
cvx.ts <- ts(cvx$open, start = c(2013,30), frequency = (5/7)*365.25 - 6 - 3*(5/7))
autoplot(cvx.ts)
# I know that the market crashed in February 2018, but I am choosing to leave that data in.
Forecast for JP Morgan #1
library(ggplot2)
## Warning: package 'ggplot2' was built under R version 3.4.3
library(fpp2)
## Warning: package 'fpp2' was built under R version 3.4.4
##
## Attaching package: 'fpp2'
## The following objects are masked from 'package:fpp':
##
## ausair, ausbeer, austa, austourists, debitcards, departures,
## elecequip, euretail, guinearice, oil, sunspotarea, usmelec
autoplot(jp.ts)
#The overall trend is positive: a slight upward climb, except for a very large decrease in late 2016.
dec.jp.m <- decompose(jp.ts, type = "multiplicative")
dec.jp.a <- decompose(jp.ts, type = "additive")
autoplot(dec.jp.m)
autoplot(dec.jp.a)
autoplot(jp.ts) +
autolayer(seasadj(dec.jp.a)) +
autolayer(seasadj(dec.jp.m))
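The decompose() objects are plain lists, so the fitted components can be inspected numerically as well as plotted; a quick sketch:
names(dec.jp.a) # seasonal, trend, random components (plus the input series and type)
range(dec.jp.a$seasonal, na.rm = TRUE) # size of the additive seasonal swing in dollars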
autoplot(BoxCox(jp.ts, lambda = "auto"))
box.jp <- BoxCox(jp.ts, lambda = "auto")
dec.box.jp.a <- decompose(box.jp, type= "additive")
dec.box.jp.m <- decompose(box.jp, type= "multiplicative")
autoplot(dec.box.jp.a)
autoplot(dec.box.jp.m)
autoplot(box.jp) +
autolayer(seasadj(dec.box.jp.a)) +
autolayer(seasadj(dec.box.jp.m))
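lambda = "auto" picks the Box-Cox parameter automatically; assuming it delegates to the package's BoxCox.lambda() selector, the chosen value can be inspected directly:
BoxCox.lambda(jp.ts) # the lambda that lambda = "auto" presumably uses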
ma.data10 <- ma(jp.ts, 10)
ma.data5 <- ma(jp.ts, 5)
ma.data7 <- ma(jp.ts, 7)
autoplot(jp.ts) +
autolayer(ma.data10, series = "MA-10")
## Warning: Removed 10 rows containing missing values (geom_path).
autoplot(jp.ts) +
autolayer(ma.data5, series = "MA-5")
## Warning: Removed 4 rows containing missing values (geom_path).
autoplot(jp.ts) +
autolayer(ma.data7, series = "MA-7")
## Warning: Removed 6 rows containing missing values (geom_path).
ma.data20 <- ma(jp.ts,20)
autoplot(jp.ts) +
autolayer(ma.data20, series = "MA-20")
## Warning: Removed 20 rows containing missing values (geom_path).
ma.data30 <- ma(jp.ts,30)
autoplot(jp.ts) +
autolayer(ma.data30, series = "MA-30")
## Warning: Removed 30 rows containing missing values (geom_path).
ma.data40 <- ma(jp.ts,40)
autoplot(jp.ts) +
autolayer(ma.data40, series = "MA-40")
## Warning: Removed 40 rows containing missing values (geom_path).
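For an odd order, ma() should reduce to a centred equal-weight moving average; a quick check of that assumption against stats::filter():
all.equal(as.numeric(ma.data7),
          as.numeric(stats::filter(jp.ts, rep(1/7, 7), sides = 2))) # expect TRUE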
mstl.jp <- mstl(jp.ts)
autoplot(mstl.jp)
box.mstl <- mstl(jp.ts, lambda = "auto")
autoplot(box.mstl)
ses.jp <- ses(jp.ts, h = 253) # simple exponential smoothing, about one trading year ahead
autoplot(jp.ts)+
autolayer(ses.jp, series = "Pred")
accuracy(ses.jp) #0.8794852
## ME RMSE MAE MPE MAPE
## Training set 0.05207357 0.8794852 0.6337509 0.06012842 0.9537955
## MASE ACF1
## Training set 0.05575177 -0.002784246
holt.jp <- holt(jp.ts, h = 253) # Holt's linear trend method
autoplot(jp.ts)+
autolayer(holt.jp, series = "Pred")
accuracy(holt.jp) #.8780976
## ME RMSE MAE MPE MAPE
## Training set 0.001160113 0.8780976 0.6299774 -0.0188683 0.9490724
## MASE ACF1
## Training set 0.05541981 -0.003557542
ets.jp <- ets(jp.ts, model = "ZZN") # Z = error and trend chosen automatically, N = no seasonal component
autoplot(jp.ts)+
autolayer(forecast(ets.jp, h = 253), series = "Pred")
accuracy(ets.jp) #0.8796202
## ME RMSE MAE MPE MAPE
## Training set 0.05299893 0.8796202 0.6337817 0.06123414 0.9537675
## MASE ACF1
## Training set 0.05575448 0.0144141
ets.jp.damp <- ets(jp.ts, model = "ZZN", damped = TRUE)
autoplot(jp.ts)+
autolayer(forecast(ets.jp.damp, h = 253), series = "Pred")
accuracy(ets.jp.damp) #0.8796643
## ME RMSE MAE MPE MAPE
## Training set 0.04921505 0.8796643 0.6337731 0.05364127 0.9537835
## MASE ACF1
## Training set 0.05575372 0.01930471
ets.jp.box <- ets(jp.ts, model = "ZZN", lambda = "auto")
autoplot(jp.ts)+
autolayer(forecast(ets.jp.box, h = 253), series = "Pred")
accuracy(ets.jp.box) #0.8797739
## ME RMSE MAE MPE MAPE
## Training set 0.05341255 0.8797739 0.6338999 0.06187507 0.9539438
## MASE ACF1
## Training set 0.05576488 0.01336792
ets.jp.box.damp <- ets(jp.ts, model = "ZZN", lambda = "auto", damped = TRUE)
autoplot(jp.ts)+
autolayer(forecast(ets.jp.box.damp, h = 253), series = "Pred")
accuracy(ets.jp.box.damp) #0.8797593
## ME RMSE MAE MPE MAPE
## Training set 0.05212368 0.8797593 0.6336928 0.05929282 0.9535378
## MASE ACF1
## Training set 0.05574666 0.01398419
stl.jp <- stlf(jp.ts, h = 253) # STL decomposition, then an ETS forecast of the seasonally adjusted series
autoplot(jp.ts)+
autolayer(forecast(stl.jp, h = 253), series = "Pred")
accuracy(stl.jp) #0.7836143
## ME RMSE MAE MPE MAPE
## Training set 0.006239975 0.7836143 0.5711303 -0.00882056 0.862666
## MASE ACF1
## Training set 0.05024296 0.01849409
#This model has the best RMSE
stl.jp.box <- stlf(jp.ts, h = 253, lambda ="auto")
autoplot(jp.ts)+
autolayer(forecast(stl.jp.box, h = 253), series = "Pred")
accuracy(stl.jp.box) #0.8172862
## ME RMSE MAE MPE MAPE MASE
## Training set 0.0532009 0.8172862 0.5897676 0.06179761 0.8709118 0.05188251
## ACF1
## Training set 0.02742793
stl.jp.box.rob <- stlf(jp.ts, h = 253, lambda ="auto", robust = TRUE)
autoplot(jp.ts)+
autolayer(forecast(stl.jp.box.rob, h = 253), series = "Pred")
accuracy(stl.jp.box.rob) #0.9439154
## ME RMSE MAE MPE MAPE MASE
## Training set 0.05182388 0.9439154 0.672278 0.05940039 0.9937733 0.05914104
## ACF1
## Training set 0.03857275
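Rather than reading the training RMSEs off one model at a time, a small sketch that collects them from the fits above into a single sorted vector:
jp.fits <- list(ses = ses.jp, holt = holt.jp, ets = ets.jp,
                stlf = stl.jp, stlf.box = stl.jp.box, stlf.rob = stl.jp.box.rob)
sort(sapply(jp.fits, function(m) accuracy(m)["Training set", "RMSE"]))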
checkresiduals(stl.jp)
## Warning in checkresiduals(stl.jp): The fitted degrees of freedom is based
## on the model used for the seasonally adjusted data.
##
## Ljung-Box test
##
## data: Residuals from STL + ETS(M,A,N)
## Q* = 278.2, df = 247.8, p-value = 0.08965
##
## Model df: 4. Total lags used: 251.8
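# With p = 0.08965 > 0.05, the Ljung-Box test fails to reject the null of uncorrelated residuals, so the JPM residuals are consistent with white noise.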
Forecast for Berkshire Hathaway #2
autoplot(brk.ts)
# The overall trend is positive, with the exception of the stock market downturn in 2015.
dec.brk.m <- decompose(brk.ts, type = "multiplicative")
dec.brk.a <- decompose(brk.ts, type = "additive")
autoplot(dec.brk.m)
autoplot(dec.brk.a)
autoplot(seasadj(dec.brk.m)) +
autolayer(seasadj(dec.brk.a)) +
autolayer(brk.ts)
autoplot(BoxCox(brk.ts, lambda = "auto"))
ma.data10 <- ma(brk.ts, 10)
ma.data5 <- ma(brk.ts, 5)
ma.data7 <- ma(brk.ts, 7)
autoplot(brk.ts) +
autolayer(ma.data10, series = "MA-10")
## Warning: Removed 10 rows containing missing values (geom_path).
autoplot(brk.ts) +
autolayer(ma.data5, series = "MA-5")
## Warning: Removed 4 rows containing missing values (geom_path).
autoplot(brk.ts) +
autolayer(ma.data7, series = "MA-7")
## Warning: Removed 6 rows containing missing values (geom_path).
ma.data20 <- ma(brk.ts,20)
autoplot(brk.ts) +
autolayer(ma.data20, series = "MA-20")
## Warning: Removed 20 rows containing missing values (geom_path).
ma.data30 <- ma(brk.ts,30)
autoplot(brk.ts) +
autolayer(ma.data30, series = "MA-30")
## Warning: Removed 30 rows containing missing values (geom_path).
ses.brk <- ses(brk.ts, h = 253)
autoplot(brk.ts)+
autolayer(ses.brk, series = "Pred")
accuracy(ses.brk) #1.352802
## ME RMSE MAE MPE MAPE MASE
## Training set 0.0891468 1.352802 0.9269174 0.05769124 0.65732 0.04494721
## ACF1
## Training set -0.001407929
holt.brk <- holt(brk.ts, h = 253)
autoplot(brk.ts)+
autolayer(holt.brk, series = "Pred")
accuracy(holt.brk) #1.349917
## ME RMSE MAE MPE MAPE
## Training set 0.0006883313 1.349917 0.9231911 -0.006248509 0.6549003
## MASE ACF1
## Training set 0.04476652 0.003328701
ets.brk <- ets(brk.ts, model = "ZZN")
autoplot(brk.ts)+
autolayer(forecast(ets.brk, h = 253), series = "Pred")
accuracy(ets.brk) #1.350675
## ME RMSE MAE MPE MAPE
## Training set -0.007291549 1.350675 0.924139 -0.01221463 0.6555424
## MASE ACF1
## Training set 0.04481248 0.03543922
ets.brk.damp <- ets(brk.ts, model = "ZZN", damped = TRUE)
autoplot(brk.ts)+
autolayer(forecast(ets.brk.damp, h = 253), series = "Pred")
accuracy(ets.brk.damp) #1.352631
## ME RMSE MAE MPE MAPE MASE
## Training set 0.07583992 1.352631 0.9261262 0.04494152 0.6564523 0.04490884
## ACF1
## Training set 0.03353948
ets.brk.box <- ets(brk.ts, model = "ZZN", lambda = "auto")
autoplot(brk.ts)+
autolayer(forecast(ets.brk.box, h = 253), series = "Pred")
accuracy(ets.brk.box) #1.350415
## ME RMSE MAE MPE MAPE
## Training set 0.001352885 1.350415 0.923743 -0.003625606 0.6553926
## MASE ACF1
## Training set 0.04479328 0.01261185
ets.brk.box.damp <- ets(brk.ts, model = "ZZN", lambda = "auto", damped = TRUE)
autoplot(brk.ts)+
autolayer(forecast(ets.brk.box.damp, h = 253), series = "Pred")
accuracy(ets.brk.box.damp) #1.352199
## ME RMSE MAE MPE MAPE MASE
## Training set 0.07450476 1.352199 0.9257388 0.04338907 0.6561509 0.04489006
## ACF1
## Training set 0.01511777
stl.brk <- stlf(brk.ts, h = 253)
autoplot(brk.ts)+
autolayer(forecast(stl.brk, h = 253), series = "Pred")
accuracy(stl.brk) #1.200676
## ME RMSE MAE MPE MAPE
## Training set 0.0004600848 1.200676 0.8547287 -0.006059034 0.6081157
## MASE ACF1
## Training set 0.0414467 0.02367755
#This model has the best RMSE
stl.brk.box <- stlf(brk.ts, h = 253, lambda ="auto")
autoplot(brk.ts)+
autolayer(forecast(stl.brk.box, h = 253), series = "Pred")
accuracy(stl.brk.box) #1.204994
## ME RMSE MAE MPE MAPE
## Training set 0.002586323 1.204994 0.8628375 -0.003134245 0.6084131
## MASE ACF1
## Training set 0.04183991 0.02082178
stl.brk.box.rob <- stlf(brk.ts, h = 253, lambda ="auto", robust = TRUE)
autoplot(brk.ts)+
autolayer(forecast(stl.brk.box.rob, h = 253), series = "Pred")
accuracy(stl.brk.box.rob) #1.725022
## ME RMSE MAE MPE MAPE MASE
## Training set 0.06755257 1.725022 1.033152 0.0441571 0.7303344 0.05009865
## ACF1
## Training set -0.002205877
checkresiduals(stl.brk)
## Warning in checkresiduals(stl.brk): The fitted degrees of freedom is based
## on the model used for the seasonally adjusted data.
##
## Ljung-Box test
##
## data: Residuals from STL + ETS(M,A,N)
## Q* = 319.2, df = 247.8, p-value = 0.00148
##
## Model df: 4. Total lags used: 251.8
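# Here p = 0.00148 < 0.05, so the Ljung-Box test rejects the null of uncorrelated residuals: some autocorrelation remains in the Berkshire model's residuals.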
Forecast for Bank of America #3
autoplot(bac.ts)
# The stock starts out relatively flat until 2016, when it tanks and then takes off on an upward trend after 2017.
dec.bac.m <- decompose(bac.ts, type = "multiplicative")
dec.bac.a <- decompose(bac.ts, type = "additive")
autoplot(dec.bac.m)
autoplot(dec.bac.a)
autoplot(seasadj(dec.bac.m)) +
autolayer(seasadj(dec.bac.a)) +
autolayer(bac.ts)
autoplot(BoxCox(bac.ts, lambda = "auto"))
ma.data10 <- ma(bac.ts, 10)
ma.data5 <- ma(bac.ts, 5)
ma.data7 <- ma(bac.ts, 7)
autoplot(bac.ts) +
autolayer(ma.data10, series = "MA-10")
## Warning: Removed 10 rows containing missing values (geom_path).
autoplot(bac.ts) +
autolayer(ma.data5, series = "MA-5")
## Warning: Removed 4 rows containing missing values (geom_path).
autoplot(bac.ts) +
autolayer(ma.data7, series = "MA-7")
## Warning: Removed 6 rows containing missing values (geom_path).
ma.data20 <- ma(bac.ts,20)
autoplot(bac.ts) +
autolayer(ma.data20, series = "MA-20")
## Warning: Removed 20 rows containing missing values (geom_path).
ma.data30 <- ma(bac.ts,30)
autoplot(bac.ts) +
autolayer(ma.data30, series = "MA-30")
## Warning: Removed 30 rows containing missing values (geom_path).
ses.bac <- ses(bac.ts, h = 253)
autoplot(bac.ts)+
autolayer(ses.bac, series = "Pred")
accuracy(ses.bac) #0.2894913
## ME RMSE MAE MPE MAPE MASE
## Training set 0.01557334 0.2894913 0.2055194 0.06457473 1.182411 0.05117117
## ACF1
## Training set -0.001485009
holt.bac <- holt(bac.ts, h = 253)
autoplot(bac.ts)+
autolayer(holt.bac, series = "Pred")
accuracy(holt.bac) #0.2891245
## ME RMSE MAE MPE MAPE
## Training set 0.0006435047 0.2891245 0.2051947 -0.02400664 1.181446
## MASE ACF1
## Training set 0.05109034 -0.01228091
ets.bac <- ets(bac.ts, model = "ZZN")
autoplot(bac.ts)+
autolayer(forecast(ets.bac, h = 253), series = "Pred")
accuracy(ets.bac) #0.2894915
## ME RMSE MAE MPE MAPE MASE
## Training set 0.01555853 0.2894915 0.205522 0.06452518 1.182428 0.05117182
## ACF1
## Training set -0.002624499
ets.bac.damp <- ets(bac.ts, model = "ZZN", damped = TRUE)
autoplot(bac.ts)+
autolayer(forecast(ets.bac.damp, h = 253), series = "Pred")
accuracy(ets.bac.damp) #0.2894905
## ME RMSE MAE MPE MAPE MASE
## Training set 0.01537208 0.2894905 0.2055174 0.06305078 1.182403 0.05117068
## ACF1
## Training set -0.0028925
ets.bac.box <- ets(bac.ts, model = "ZZN", lambda = "auto")
autoplot(bac.ts)+
autolayer(forecast(ets.bac.box, h = 253), series = "Pred")
accuracy(ets.bac.box) #0.2894916
## ME RMSE MAE MPE MAPE MASE
## Training set 0.01556408 0.2894916 0.2055199 0.06453975 1.182411 0.05117128
## ACF1
## Training set -0.002187907
ets.bac.box.damp <- ets(bac.ts, model = "ZZN", lambda = "auto", damped = TRUE)
autoplot(bac.ts)+
autolayer(forecast(ets.bac.box.damp, h = 253), series = "Pred")
accuracy(ets.bac.box.damp) #0.2894071
## ME RMSE MAE MPE MAPE MASE
## Training set 0.01194615 0.2894071 0.2054419 0.04579265 1.182579 0.05115186
## ACF1
## Training set 0.005360752
stl.bac <- stlf(bac.ts, h = 253)
autoplot(bac.ts)+
autolayer(forecast(stl.bac, h = 253), series = "Pred")
accuracy(stl.bac) #0.2595154
## ME RMSE MAE MPE MAPE
## Training set 0.0004158757 0.2595154 0.1876773 -0.02133816 1.084892
## MASE ACF1
## Training set 0.04672876 0.005999475
stl.bac.box <- stlf(bac.ts, h = 253, lambda ="auto")
autoplot(bac.ts)+
autolayer(forecast(stl.bac.box, h = 253), series = "Pred")
accuracy(stl.bac.box) #0.2593555
## ME RMSE MAE MPE MAPE
## Training set 0.0006780477 0.2593555 0.1875714 -0.01970162 1.083757
## MASE ACF1
## Training set 0.04670239 0.006865843
#This model has the best RMSE
stl.bac.box.rob <- stlf(bac.ts, h = 253, lambda ="auto", robust = TRUE)
autoplot(bac.ts)+
autolayer(forecast(stl.bac.box.rob, h = 253), series = "Pred")
accuracy(stl.bac.box.rob) #0.3044145
## ME RMSE MAE MPE MAPE MASE
## Training set 0.01533502 0.3044145 0.2171423 0.07133869 1.266814 0.0540651
## ACF1
## Training set 0.001028607
checkresiduals(stl.bac)
## Warning in checkresiduals(stl.bac): The fitted degrees of freedom is based
## on the model used for the seasonally adjusted data.
##
## Ljung-Box test
##
## data: Residuals from STL + ETS(M,A,N)
## Q* = 325.5, df = 247.8, p-value = 0.0006644
##
## Model df: 4. Total lags used: 251.8
Forecast for Wells Fargo #4
autoplot(wfc.ts)
# The stock trends upward until the 2015 market downturn, after which it bounces up and down.
dec.wfc.m <- decompose(wfc.ts, type = "multiplicative")
dec.wfc.a <- decompose(wfc.ts, type = "additive")
autoplot(dec.wfc.m)
autoplot(dec.wfc.a)
autoplot(seasadj(dec.wfc.m)) +
autolayer(seasadj(dec.wfc.a)) +
autolayer(wfc.ts)
autoplot(BoxCox(wfc.ts, lambda = "auto"))
ma.data10 <- ma(wfc.ts, 10)
ma.data5 <- ma(wfc.ts, 5)
ma.data7 <- ma(wfc.ts, 7)
autoplot(wfc.ts) +
autolayer(ma.data10, series = "MA-10")
## Warning: Removed 10 rows containing missing values (geom_path).
autoplot(wfc.ts) +
autolayer(ma.data5, series = "MA-5")
## Warning: Removed 4 rows containing missing values (geom_path).
autoplot(wfc.ts) +
autolayer(ma.data7, series = "MA-7")
## Warning: Removed 6 rows containing missing values (geom_path).
ma.data20 <- ma(wfc.ts,20)
autoplot(wfc.ts) +
autolayer(ma.data20, series = "MA-20")
## Warning: Removed 20 rows containing missing values (geom_path).
ma.data30 <- ma(wfc.ts,30)
autoplot(wfc.ts) +
autolayer(ma.data30, series = "MA-30")
## Warning: Removed 30 rows containing missing values (geom_path).
ses.wfc <- ses(wfc.ts, h = 253)
autoplot(wfc.ts)+
autolayer(ses.wfc, series = "Pred")
accuracy(ses.wfc) #0.6367074
## ME RMSE MAE MPE MAPE
## Training set 0.01792604 0.6367074 0.4366768 0.03209776 0.8660263
## MASE ACF1
## Training set 0.06551961 -0.0005144099
holt.wfc <- holt(wfc.ts, h = 253)
autoplot(wfc.ts)+
autolayer(holt.wfc, series = "Pred")
accuracy(holt.wfc) #0.6375695
## ME RMSE MAE MPE MAPE
## Training set 0.0008983491 0.6375695 0.4365837 -0.001222792 0.8667634
## MASE ACF1
## Training set 0.06550565 0.001494138
ets.wfc <- ets(wfc.ts, model = "ZZN")
autoplot(wfc.ts)+
autolayer(forecast(ets.wfc, h = 253), series = "Pred")
accuracy(ets.wfc) #0.6368002
## ME RMSE MAE MPE MAPE
## Training set 0.01824742 0.6368002 0.4371215 0.03267985 0.8669979
## MASE ACF1
## Training set 0.06558633 0.0164777
ets.wfc.damp <- ets(wfc.ts, model = "ZZN", damped = TRUE)
autoplot(wfc.ts)+
autolayer(forecast(ets.wfc.damp, h = 253), series = "Pred")
accuracy(ets.wfc.damp) #0.6366146
## ME RMSE MAE MPE MAPE
## Training set 0.01465285 0.6366146 0.4364938 0.02302205 0.8653486
## MASE ACF1
## Training set 0.06549214 0.01648453
ets.wfc.box <- ets(wfc.ts, model = "ZZN", lambda = "auto")
autoplot(wfc.ts)+
autolayer(forecast(ets.wfc.box, h = 253), series = "Pred")
accuracy(ets.wfc.box) #0.6367178
## ME RMSE MAE MPE MAPE
## Training set 0.01799884 0.6367178 0.4367245 0.03223467 0.8661348
## MASE ACF1
## Training set 0.06552677 0.001924675
ets.wfc.box.damp <- ets(wfc.ts, model = "ZZN", lambda = "auto", damped = TRUE)
autoplot(wfc.ts)+
autolayer(forecast(ets.wfc.box.damp, h = 253), series = "Pred")
accuracy(ets.wfc.box.damp) #0.6366498
## ME RMSE MAE MPE MAPE
## Training set 0.01700988 0.6366498 0.4364735 0.02945242 0.8654345
## MASE ACF1
## Training set 0.06548911 0.001742037
stl.wfc <- stlf(wfc.ts, h = 253)
autoplot(wfc.ts)+
autolayer(forecast(stl.wfc, h = 253), series = "Pred")
accuracy(stl.wfc) #0.5622515
## ME RMSE MAE MPE MAPE
## Training set 0.01766315 0.5622515 0.3971173 0.03245101 0.7896173
## MASE ACF1
## Training set 0.05958405 0.01053205
#This model has the best RMSE
stl.wfc.box <- stlf(wfc.ts, h = 253, lambda ="auto")
autoplot(wfc.ts)+
autolayer(forecast(stl.wfc.box, h = 253), series = "Pred")
accuracy(stl.wfc.box) #0.5609947
## ME RMSE MAE MPE MAPE MASE
## Training set 0.01772985 0.5609947 0.396204 0.03262945 0.7865213 0.05944702
## ACF1
## Training set 0.009741254
stl.wfc.box.rob <- stlf(wfc.ts, h = 253, lambda ="auto", robust = TRUE)
autoplot(wfc.ts)+
autolayer(forecast(stl.wfc.box.rob, h = 253), series = "Pred")
accuracy(stl.wfc.box.rob) #0.631246
## ME RMSE MAE MPE MAPE MASE
## Training set 0.01856614 0.631246 0.4349781 0.03313418 0.8618055 0.06526474
## ACF1
## Training set 0.01754645
checkresiduals(stl.wfc)
## Warning in checkresiduals(stl.wfc): The fitted degrees of freedom is based
## on the model used for the seasonally adjusted data.
##
## Ljung-Box test
##
## data: Residuals from STL + ETS(M,N,N)
## Q* = 334.85, df = 249.8, p-value = 0.0002583
##
## Model df: 2. Total lags used: 251.8
Forecast for Apple #5
autoplot(aapl.ts)
# The overall trend is upward until the market downturn; after the recovery, the stock resumes its upward climb.
dec.aapl.m <- decompose(aapl.ts, type = "multiplicative")
dec.aapl.a <- decompose(aapl.ts, type = "additive")
autoplot(dec.aapl.m)
autoplot(dec.aapl.a)
autoplot(seasadj(dec.aapl.m)) +
autolayer(seasadj(dec.aapl.a)) +
autolayer(aapl.ts)
autoplot(BoxCox(aapl.ts, lambda = "auto"))
ma.data10 <- ma(aapl.ts, 10)
ma.data5 <- ma(aapl.ts, 5)
ma.data7 <- ma(aapl.ts, 7)
autoplot(aapl.ts) +
autolayer(ma.data10, series = "MA-10")
## Warning: Removed 10 rows containing missing values (geom_path).
autoplot(aapl.ts) +
autolayer(ma.data5, series = "MA-5")
## Warning: Removed 4 rows containing missing values (geom_path).
autoplot(aapl.ts) +
autolayer(ma.data7, series = "MA-7")
## Warning: Removed 6 rows containing missing values (geom_path).
ma.data20 <- ma(aapl.ts,20)
autoplot(aapl.ts) +
autolayer(ma.data20, series = "MA-20")
## Warning: Removed 20 rows containing missing values (geom_path).
ma.data30 <- ma(aapl.ts,30)
autoplot(aapl.ts) +
autolayer(ma.data30, series = "MA-30")
## Warning: Removed 30 rows containing missing values (geom_path).
ses.aapl <- ses(aapl.ts, h = 253)
autoplot(aapl.ts)+
autolayer(ses.aapl, series = "Pred")
accuracy(ses.aapl) #1.726283
## ME RMSE MAE MPE MAPE MASE
## Training set 0.08543572 1.726283 1.166321 0.06419552 1.107362 0.03852574
## ACF1
## Training set -0.003907581
holt.aapl <- holt(aapl.ts, h = 253)
autoplot(aapl.ts)+
autolayer(holt.aapl, series = "Pred")
accuracy(holt.aapl) #1.724785
## ME RMSE MAE MPE MAPE MASE
## Training set 0.001507313 1.724785 1.162588 -0.01982149 1.104809 0.03840245
## ACF1
## Training set -0.003821166
ets.aapl <- ets(aapl.ts, model = "ZZN")
autoplot(aapl.ts)+
autolayer(forecast(ets.aapl, h = 253), series = "Pred")
accuracy(ets.aapl) #1.726622
## ME RMSE MAE MPE MAPE MASE
## Training set 0.08726582 1.726622 1.168039 0.06566524 1.109124 0.03858249
## ACF1
## Training set 0.01556077
ets.aapl.damp <- ets(aapl.ts, model = "ZZN", damped = TRUE)
autoplot(aapl.ts)+
autolayer(forecast(ets.aapl.damp, h = 253), series = "Pred")
accuracy(ets.aapl.damp) #1.726265
## ME RMSE MAE MPE MAPE MASE
## Training set 0.09280153 1.726265 1.168219 0.07412299 1.10922 0.03858844
## ACF1
## Training set 0.016452
ets.aapl.box <- ets(aapl.ts, model = "ZZN", lambda = "auto")
autoplot(aapl.ts)+
autolayer(forecast(ets.aapl.box, h = 253), series = "Pred")
accuracy(ets.aapl.box) #1.726265
## ME RMSE MAE MPE MAPE MASE
## Training set 0.08537271 1.726265 1.166291 0.06413976 1.107332 0.03852477
## ACF1
## Training set -0.004245886
ets.aapl.box.damp <- ets(aapl.ts, model = "ZZN", lambda = "auto", damped = TRUE)
autoplot(aapl.ts)+
autolayer(forecast(ets.aapl.box.damp, h = 253), series = "Pred")
accuracy(ets.aapl.box.damp) #1.725536
## ME RMSE MAE MPE MAPE MASE
## Training set 0.08075686 1.725536 1.165449 0.06344666 1.107029 0.03849693
## ACF1
## Training set -0.003092723
stl.aapl <- stlf(aapl.ts, h = 253)
autoplot(aapl.ts)+
autolayer(forecast(stl.aapl, h = 253), series = "Pred")
accuracy(stl.aapl) #1.524812
## ME RMSE MAE MPE MAPE MASE
## Training set 0.08787856 1.524812 1.086323 0.06762393 1.048568 0.03588327
## ACF1
## Training set -0.003333371
#This model has the best RMSE
stl.aapl.box <- stlf(aapl.ts, h = 253, lambda ="auto")
autoplot(aapl.ts)+
autolayer(forecast(stl.aapl.box, h = 253), series = "Pred")
accuracy(stl.aapl.box) #1.525369
## ME RMSE MAE MPE MAPE MASE
## Training set 0.08777088 1.525369 1.087058 0.06752383 1.049906 0.03590755
## ACF1
## Training set -0.003864088
stl.aapl.box.rob <- stlf(aapl.ts, h = 253, lambda ="auto", robust = TRUE)
autoplot(aapl.ts)+
autolayer(forecast(stl.aapl.box.rob, h = 253), series = "Pred")
accuracy(stl.aapl.box.rob) #1.848222
## ME RMSE MAE MPE MAPE MASE
## Training set 0.09525599 1.848222 1.26734 0.07489443 1.220853 0.0418626
## ACF1
## Training set -0.002173559
checkresiduals(stl.aapl)
## Warning in checkresiduals(stl.aapl): The fitted degrees of freedom is based
## on the model used for the seasonally adjusted data.
##
## Ljung-Box test
##
## data: Residuals from STL + ETS(A,N,N)
## Q* = 283, df = 249.8, p-value = 0.07304
##
## Model df: 2. Total lags used: 251.8
Forecast for Exxon Mobil #6
autoplot(xom.ts)
# The overall trend of this stock is downward, except for a spike in mid-2015 that holds for about a year before a slight downward trend sets back in.
dec.xom.m <- decompose(xom.ts, type = "multiplicative")
dec.xom.a <- decompose(xom.ts, type = "additive")
autoplot(dec.xom.m)
autoplot(dec.xom.a)
autoplot(seasadj(dec.xom.m)) +
autolayer(seasadj(dec.xom.a)) +
autolayer(xom.ts)
autoplot(BoxCox(xom.ts, lambda = "auto"))
ma.data10 <- ma(xom.ts, 10)
ma.data5 <- ma(xom.ts, 5)
ma.data7 <- ma(xom.ts, 7)
autoplot(xom.ts) +
autolayer(ma.data10, series = "MA-10")
## Warning: Removed 10 rows containing missing values (geom_path).
autoplot(xom.ts) +
autolayer(ma.data5, series = "MA-5")
## Warning: Removed 4 rows containing missing values (geom_path).
autoplot(xom.ts) +
autolayer(ma.data7, series = "MA-7")
## Warning: Removed 6 rows containing missing values (geom_path).
ma.data20 <- ma(xom.ts,20)
autoplot(xom.ts) +
autolayer(ma.data20, series = "MA-20")
## Warning: Removed 20 rows containing missing values (geom_path).
ma.data30 <- ma(xom.ts,30)
autoplot(xom.ts) +
autolayer(ma.data30, series = "MA-30")
## Warning: Removed 30 rows containing missing values (geom_path).
ses.xom <- ses(xom.ts, h = 253)
autoplot(xom.ts)+
autolayer(ses.xom, series = "Pred")
accuracy(ses.xom) #0.913185
## ME RMSE MAE MPE MAPE
## Training set -0.00822766 0.913185 0.6717947 -0.01583679 0.7765014
## MASE ACF1
## Training set 0.07437499 5.081122e-06
holt.xom <- holt(xom.ts, h = 253)
autoplot(xom.ts)+
autolayer(holt.xom, series = "Pred")
accuracy(holt.xom) #0.9135467
## ME RMSE MAE MPE MAPE
## Training set 0.003843153 0.9135467 0.6724389 -0.002011962 0.7771485
## MASE ACF1
## Training set 0.07444631 0.01427258
ets.xom <- ets(xom.ts, model = "ZZN")
autoplot(xom.ts)+
autolayer(forecast(ets.xom, h = 253), series = "Pred")
accuracy(ets.xom) #0.913185
## ME RMSE MAE MPE MAPE
## Training set -0.008229432 0.913185 0.6717953 -0.01583776 0.7765021
## MASE ACF1
## Training set 0.07437506 -0.0001589134
ets.xom.damp <- ets(xom.ts, model = "ZZN", damped = TRUE)
autoplot(xom.ts)+
autolayer(forecast(ets.xom.damp, h = 253), series = "Pred")
accuracy(ets.xom.damp) #0.9132083
## ME RMSE MAE MPE MAPE
## Training set -0.008528862 0.9132083 0.6720411 -0.01617595 0.7767807
## MASE ACF1
## Training set 0.07440227 9.539692e-05
ets.xom.box <- ets(xom.ts, model = "ZZN", lambda = "auto")
autoplot(xom.ts)+
autolayer(forecast(ets.xom.box, h = 253), series = "Pred")
accuracy(ets.xom.box) #0.9131825
## ME RMSE MAE MPE MAPE
## Training set -0.00824102 0.9131825 0.6718021 -0.01585029 0.7765099
## MASE ACF1
## Training set 0.07437581 -0.0003781224
ets.xom.box.damp <- ets(xom.ts, model = "ZZN", lambda = "auto", damped = TRUE)
autoplot(xom.ts)+
autolayer(forecast(ets.xom.box.damp, h = 253), series = "Pred")
accuracy(ets.xom.box.damp) #0.9131957
## ME RMSE MAE MPE MAPE
## Training set -0.008123658 0.9131957 0.6719129 -0.01571919 0.7766348
## MASE ACF1
## Training set 0.07438807 9.616529e-05
stl.xom <- stlf(xom.ts, h = 253)
autoplot(xom.ts)+
autolayer(forecast(stl.xom, h = 253), series = "Pred")
accuracy(stl.xom) #0.79259
## ME RMSE MAE MPE MAPE
## Training set -0.007396764 0.79259 0.6104887 -0.01315696 0.7053574
## MASE ACF1
## Training set 0.06758775 5.189526e-05
stl.xom.box <- stlf(xom.ts, h = 253, lambda ="auto")
autoplot(xom.ts)+
autolayer(forecast(stl.xom.box, h = 253), series = "Pred")
accuracy(stl.xom.box) #0.7922227
## ME RMSE MAE MPE MAPE
## Training set -0.007389485 0.7922227 0.6102358 -0.01311804 0.7051523
## MASE ACF1
## Training set 0.06755975 -0.0006928527
#This model has the best RMSE
stl.xom.box.rob <- stlf(xom.ts, h = 253, lambda ="auto", robust = TRUE)
autoplot(xom.ts)+
autolayer(forecast(stl.xom.box.rob, h = 253), series = "Pred")
accuracy(stl.xom.box.rob) #0.9200737
## ME RMSE MAE MPE MAPE
## Training set -0.008084833 0.9200737 0.6678367 -0.0146773 0.7752702
## MASE ACF1
## Training set 0.0739368 -0.003933122
checkresiduals(stl.xom)
## Warning in checkresiduals(stl.xom): The fitted degrees of freedom is based
## on the model used for the seasonally adjusted data.
##
## Ljung-Box test
##
## data: Residuals from STL + ETS(A,N,N)
## Q* = 286.42, df = 249.8, p-value = 0.05548
##
## Model df: 2. Total lags used: 251.8
Forecast for AT&T #7
autoplot(at.ts)
# The stock shows a slight downward trend until 2016 and is simply volatile after that.
dec.at.m <- decompose(at.ts, type = "multiplicative")
dec.at.a <- decompose(at.ts, type = "additive")
autoplot(dec.at.m)
autoplot(dec.at.a)
autoplot(seasadj(dec.at.m)) +
autolayer(seasadj(dec.at.a)) +
autolayer(at.ts)
autoplot(BoxCox(at.ts, lambda = "auto"))
ma.data10 <- ma(at.ts, 10)
ma.data5 <- ma(at.ts, 5)
ma.data7 <- ma(at.ts, 7)
autoplot(at.ts) +
autolayer(ma.data10, series = "MA-10")
## Warning: Removed 10 rows containing missing values (geom_path).
autoplot(at.ts) +
autolayer(ma.data5, series = "MA-5")
## Warning: Removed 4 rows containing missing values (geom_path).
autoplot(at.ts) +
autolayer(ma.data7, series = "MA-7")
## Warning: Removed 6 rows containing missing values (geom_path).
ma.data20 <- ma(at.ts,20)
autoplot(at.ts) +
autolayer(ma.data20, series = "MA-20")
## Warning: Removed 20 rows containing missing values (geom_path).
ma.data30 <- ma(at.ts,30)
autoplot(at.ts) +
autolayer(ma.data30, series = "MA-30")
## Warning: Removed 30 rows containing missing values (geom_path).
ses.at <- ses(at.ts, h = 253)
autoplot(at.ts)+
autolayer(ses.at, series = "Pred")
accuracy(ses.at) #0.3599087
## ME RMSE MAE MPE MAPE
## Training set 0.001134779 0.3599087 0.2675956 -0.001845054 0.7387043
## MASE ACF1
## Training set 0.0910768 -0.0001102851
holt.at <- holt(at.ts, h = 253)
autoplot(at.ts)+
autolayer(holt.at, series = "Pred")
accuracy(holt.at) #0.360435
## ME RMSE MAE MPE MAPE
## Training set 0.0004025651 0.360435 0.2681117 -0.0038267 0.7401639
## MASE ACF1
## Training set 0.09125247 0.008940948
ets.at <- ets(at.ts, model = "ZZN")
autoplot(at.ts)+
autolayer(forecast(ets.at, h = 253), series = "Pred")
accuracy(ets.at) #0.3599087
## ME RMSE MAE MPE MAPE
## Training set 0.001134219 0.3599087 0.267595 -0.001847275 0.7387024
## MASE ACF1
## Training set 0.09107658 2.296121e-05
ets.at.damp <- ets(at.ts, model = "ZZN", damped = TRUE)
autoplot(at.ts)+
autolayer(forecast(ets.at.damp, h = 253), series = "Pred")
accuracy(ets.at.damp) #0.3598988
## ME RMSE MAE MPE MAPE
## Training set 0.0009938112 0.3598988 0.2676051 -0.002144604 0.7387338
## MASE ACF1
## Training set 0.09108002 6.638831e-05
ets.at.box <- ets(at.ts, model = "ZZN", lambda = "auto")
autoplot(at.ts)+
autolayer(forecast(ets.at.box, h = 253), series = "Pred")
accuracy(ets.at.box) #0.3600021
## ME RMSE MAE MPE MAPE
## Training set 0.001262659 0.3600021 0.2676226 -0.001599428 0.7387381
## MASE ACF1
## Training set 0.09108599 0.0220096
ets.at.box.damp <- ets(at.ts, model = "ZZN", lambda = "auto", damped = TRUE)
autoplot(at.ts)+
autolayer(forecast(ets.at.box.damp, h = 253), series = "Pred")
accuracy(ets.at.box.damp) #0.3599452
## ME RMSE MAE MPE MAPE
## Training set -7.81875e-05 0.3599452 0.2675459 -0.005288981 0.7385504
## MASE ACF1
## Training set 0.09105988 0.02135713
stl.at <- stlf(at.ts, h = 253)
autoplot(at.ts)+
autolayer(forecast(stl.at, h = 253), series = "Pred")
accuracy(stl.at) #0.3136154
## ME RMSE MAE MPE MAPE
## Training set 0.0008430593 0.3136154 0.2377149 -0.00175276 0.6567895
## MASE ACF1
## Training set 0.08090683 0.0004429256
#This model has the best RMSE
stl.at.box <- stlf(at.ts, h = 253, lambda ="auto")
autoplot(at.ts)+
autolayer(forecast(stl.at.box, h = 253), series = "Pred")
accuracy(stl.at.box) #0.3156268
## ME RMSE MAE MPE MAPE
## Training set 0.001016528 0.3156268 0.2386727 -0.001156732 0.6575243
## MASE ACF1
## Training set 0.08123284 0.01044353
stl.at.box.rob <- stlf(at.ts, h = 253, lambda ="auto", robust = TRUE)
autoplot(at.ts)+
autolayer(forecast(stl.at.box.rob, h = 253), series = "Pred")
accuracy(stl.at.box.rob) #0.3420974
## ME RMSE MAE MPE MAPE
## Training set 0.0005684103 0.3420974 0.257366 -0.00234035 0.708907
## MASE ACF1
## Training set 0.08759513 0.005578003
checkresiduals(stl.at)
## Warning in checkresiduals(stl.at): The fitted degrees of freedom is based
## on the model used for the seasonally adjusted data.
##
## Ljung-Box test
##
## data: Residuals from STL + ETS(A,N,N)
## Q* = 307.67, df = 249.8, p-value = 0.00731
##
## Model df: 2. Total lags used: 251.8
Forecast for Verizon #8
autoplot(vz.ts)
# This stock is volatile, with a slight downward trend and then a slight upward trend starting midway through 2017.
dec.vz.m <- decompose(vz.ts, type = "multiplicative")
dec.vz.a <- decompose(vz.ts, type = "additive")
autoplot(dec.vz.m)
autoplot(dec.vz.a)
autoplot(seasadj(dec.vz.m)) +
autolayer(seasadj(dec.vz.a)) +
autolayer(vz.ts)
autoplot(BoxCox(vz.ts, lambda = "auto"))
ma.data10 <- ma(vz.ts, 10)
ma.data5 <- ma(vz.ts, 5)
ma.data7 <- ma(vz.ts, 7)
autoplot(vz.ts) +
autolayer(ma.data10, series = "MA-10")
## Warning: Removed 10 rows containing missing values (geom_path).
autoplot(vz.ts) +
autolayer(ma.data5, series = "MA-5")
## Warning: Removed 4 rows containing missing values (geom_path).
autoplot(vz.ts) +
autolayer(ma.data7, series = "MA-7")
## Warning: Removed 6 rows containing missing values (geom_path).
ma.data20 <- ma(vz.ts,20)
autoplot(vz.ts) +
autolayer(ma.data20, series = "MA-20")
## Warning: Removed 20 rows containing missing values (geom_path).
ma.data30 <- ma(vz.ts,30)
autoplot(vz.ts) +
autolayer(ma.data30, series = "MA-30")
## Warning: Removed 30 rows containing missing values (geom_path).
ses.vz <- ses(vz.ts, h = 253)
autoplot(vz.ts)+
autolayer(ses.vz, series = "Pred")
accuracy(ses.vz) #0.5093928
## ME RMSE MAE MPE MAPE
## Training set 0.004812963 0.5093928 0.3780704 0.004684639 0.7737266
## MASE ACF1
## Training set 0.1149017 0.01457992
holt.vz <- holt(vz.ts, h = 253)
autoplot(vz.ts)+
autolayer(holt.vz, series = "Pred")
accuracy(holt.vz) #0.5097683
## ME RMSE MAE MPE MAPE
## Training set 0.0001183236 0.5097683 0.3785599 -0.004996432 0.7748737
## MASE ACF1
## Training set 0.1150504 0.0149699
ets.vz <- ets(vz.ts, model = "ZZN")
autoplot(vz.ts)+
autolayer(forecast(ets.vz, h = 253), series = "Pred")
accuracy(ets.vz) #0.5093928
## ME RMSE MAE MPE MAPE
## Training set 0.004812963 0.5093928 0.3780704 0.004684639 0.7737266
## MASE ACF1
## Training set 0.1149017 0.01457992
ets.vz.damp <- ets(vz.ts, model = "ZZN", damped = TRUE)
autoplot(vz.ts)+
autolayer(forecast(ets.vz.damp, h = 253), series = "Pred")
accuracy(ets.vz.damp) #0.5093039
## ME RMSE MAE MPE MAPE
## Training set 0.003872072 0.5093039 0.3779131 0.003001618 0.773452
## MASE ACF1
## Training set 0.1148539 0.002824265
ets.vz.box <- ets(vz.ts, model = "ZZN", lambda = "auto")
autoplot(vz.ts)+
autolayer(forecast(ets.vz.box, h = 253), series = "Pred")
accuracy(ets.vz.box) #0.50939
## ME RMSE MAE MPE MAPE MASE
## Training set 0.004862705 0.50939 0.3780231 0.004796356 0.7736205 0.1148873
## ACF1
## Training set 0.01454079
ets.vz.box.damp <- ets(vz.ts, model = "ZZN", lambda = "auto", damped = TRUE)
autoplot(vz.ts)+
autolayer(forecast(ets.vz.box.damp, h = 253), series = "Pred")
accuracy(ets.vz.box.damp) #0.5092634
## ME RMSE MAE MPE MAPE
## Training set 0.003878214 0.5092634 0.3777346 0.003022218 0.7730469
## MASE ACF1
## Training set 0.1147996 0.003983157
stl.vz <- stlf(vz.ts, h = 253)
autoplot(vz.ts)+
autolayer(forecast(stl.vz, h = 253), series = "Pred")
accuracy(stl.vz) #0.446133
## ME RMSE MAE MPE MAPE
## Training set 0.003923409 0.446133 0.3372041 0.003990434 0.6903078
## MASE ACF1
## Training set 0.1024818 0.006692068
#This model has the best RMSE
stl.vz.box <- stlf(vz.ts, h = 253, lambda ="auto")
autoplot(vz.ts)+
autolayer(forecast(stl.vz.box, h = 253), series = "Pred")
accuracy(stl.vz.box) #0.4463567
## ME RMSE MAE MPE MAPE
## Training set 0.00391696 0.4463567 0.3372087 0.004001496 0.6900439
## MASE ACF1
## Training set 0.1024831 0.006807782
stl.vz.box.rob <- stlf(vz.ts, h = 253, lambda ="auto", robust = TRUE)
autoplot(vz.ts)+
autolayer(forecast(stl.vz.box.rob, h = 253), series = "Pred")
accuracy(stl.vz.box.rob) #0.4709856
## ME RMSE MAE MPE MAPE
## Training set 0.004163026 0.4709856 0.3519572 0.004370199 0.7202418
## MASE ACF1
## Training set 0.1069654 -0.002469184
checkresiduals(stl.vz)
## Warning in checkresiduals(stl.vz): The fitted degrees of freedom is based
## on the model used for the seasonally adjusted data.
##
## Ljung-Box test
##
## data: Residuals from STL + ETS(A,N,N)
## Q* = 254.31, df = 249.8, p-value = 0.4089
##
## Model df: 2. Total lags used: 251.8
Forecast for Microsoft #9
autoplot(mfst.ts)
# The stock has a very prominent upward trend with little volatility.
dec.mfst.m <- decompose(mfst.ts, type = "multiplicative")
dec.mfst.a <- decompose(mfst.ts, type = "additive")
autoplot(dec.mfst.m)
autoplot(dec.mfst.a)
autoplot(seasadj(dec.mfst.m)) +
autolayer(seasadj(dec.mfst.a)) +
autolayer(mfst.ts)
autoplot(BoxCox(mfst.ts, lambda = "auto"))
ma.data10 <- ma(mfst.ts, 10)
ma.data5 <- ma(mfst.ts, 5)
ma.data7 <- ma(mfst.ts, 7)
autoplot(mfst.ts) +
autolayer(ma.data10, series = "MA-10")
## Warning: Removed 10 rows containing missing values (geom_path).
autoplot(mfst.ts) +
autolayer(ma.data5, series = "MA-5")
## Warning: Removed 4 rows containing missing values (geom_path).
autoplot(mfst.ts) +
autolayer(ma.data7, series = "MA-7")
## Warning: Removed 6 rows containing missing values (geom_path).
ma.data20 <- ma(mfst.ts,20)
autoplot(mfst.ts) +
autolayer(ma.data20, series = "MA-20")
## Warning: Removed 20 rows containing missing values (geom_path).
ma.data30 <- ma(mfst.ts,30)
autoplot(mfst.ts) +
autolayer(ma.data30, series = "MA-30")
## Warning: Removed 30 rows containing missing values (geom_path).
ses.mfst <- ses(mfst.ts, h = 253)
autoplot(mfst.ts)+
autolayer(ses.mfst, series = "Pred")
accuracy(ses.mfst) #0.7111658
## ME RMSE MAE MPE MAPE
## Training set 0.05269465 0.7111658 0.4766871 0.08915778 0.9672856
## MASE ACF1
## Training set 0.04527128 -0.004263744
holt.mfst <- holt(mfst.ts, h = 253)
autoplot(mfst.ts)+
autolayer(holt.mfst, series = "Pred")
accuracy(holt.mfst) #0.7096357
## ME RMSE MAE MPE MAPE
## Training set -9.330011e-05 0.7096357 0.4756513 -0.0243399 0.9673952
## MASE ACF1
## Training set 0.04517291 0.003989118
ets.mfst <- ets(mfst.ts, model = "ZZN")
autoplot(mfst.ts)+
autolayer(forecast(ets.mfst, h = 253), series = "Pred")
accuracy(ets.mfst) #0.7092968
## ME RMSE MAE MPE MAPE
## Training set 0.0005108177 0.7092968 0.4752299 -0.02209723 0.9656994
## MASE ACF1
## Training set 0.04513289 -0.005982641
ets.mfst.damp <- ets(mfst.ts, model = "ZZN", damped = TRUE)
autoplot(mfst.ts)+
autolayer(forecast(ets.mfst.damp, h = 253), series = "Pred")
accuracy(ets.mfst.damp) #0.7111353
## ME RMSE MAE MPE MAPE
## Training set 0.05133895 0.7111353 0.4765259 0.08480577 0.9665147
## MASE ACF1
## Training set 0.04525597 -0.00879819
ets.mfst.box <- ets(mfst.ts, model = "ZZN", lambda = "auto")
autoplot(mfst.ts)+
autolayer(forecast(ets.mfst.box, h = 253), series = "Pred")
accuracy(ets.mfst.box) #0.7093602
## ME RMSE MAE MPE MAPE
## Training set -0.001066889 0.7093602 0.4748914 -0.012584 0.9648994
## MASE ACF1
## Training set 0.04510074 -0.02010114
ets.mfst.box.damp <- ets(mfst.ts, model = "ZZN", lambda = "auto", damped = TRUE)
autoplot(mfst.ts)+
autolayer(forecast(ets.mfst.box.damp, h = 253), series = "Pred")
accuracy(ets.mfst.box.damp) #0.7111869
## ME RMSE MAE MPE MAPE
## Training set 0.04558802 0.7111869 0.4768525 0.07353592 0.9678782
## MASE ACF1
## Training set 0.04528699 -0.01114846
stl.mfst <- stlf(mfst.ts, h = 253)
autoplot(mfst.ts)+
autolayer(forecast(stl.mfst, h = 253), series = "Pred")
accuracy(stl.mfst) #0.6310272
## ME RMSE MAE MPE MAPE
## Training set 0.05121482 0.6310272 0.4366059 0.08697857 0.8965748
## MASE ACF1
## Training set 0.04146474 -0.004317787
#This model has the best RMSE
stl.mfst.box <- stlf(mfst.ts, h = 253, lambda ="auto")
autoplot(mfst.ts)+
autolayer(forecast(stl.mfst.box, h = 253), series = "Pred")
accuracy(stl.mfst.box) #0.6642793
## ME RMSE MAE MPE MAPE
## Training set 0.001442167 0.6642793 0.4544004 -0.00818575 0.8983631
## MASE ACF1
## Training set 0.0431547 -0.04372057
stl.mfst.box.rob <- stlf(mfst.ts, h = 253, lambda ="auto", robust = TRUE)
autoplot(mfst.ts)+
autolayer(forecast(stl.mfst.box.rob, h = 253), series = "Pred")
accuracy(stl.mfst.box.rob) #0.7778222
## ME RMSE MAE MPE MAPE
## Training set 0.0004765442 0.7778222 0.5072073 -0.01174891 1.004141
## MASE ACF1
## Training set 0.0481698 -0.02051611
checkresiduals(stl.mfst)
## Warning in checkresiduals(stl.mfst): The fitted degrees of freedom is based
## on the model used for the seasonally adjusted data.
##
## Ljung-Box test
##
## data: Residuals from STL + ETS(A,N,N)
## Q* = 318.57, df = 249.8, p-value = 0.002106
##
## Model df: 2. Total lags used: 251.8
Forecast for Chevron #10
autoplot(cvx.ts)
# The stock has a noticeable downward trend until 2016, when it turns around and climbs steadily.
dec.cvx.m <- decompose(cvx.ts, type = "multiplicative")
dec.cvx.a <- decompose(cvx.ts, type = "additive")
autoplot(dec.cvx.m)
autoplot(dec.cvx.a)
autoplot(seasadj(dec.cvx.m)) +
autolayer(seasadj(dec.cvx.a)) +
autolayer(cvx.ts)
autoplot(BoxCox(cvx.ts, lambda = "auto"))
ma.data10 <- ma(cvx.ts, 10)
ma.data5 <- ma(cvx.ts, 5)
ma.data7 <- ma(cvx.ts, 7)
autoplot(cvx.ts) +
autolayer(ma.data10, series = "MA-10")
## Warning: Removed 10 rows containing missing values (geom_path).
autoplot(cvx.ts) +
autolayer(ma.data5, series = "MA-5")
## Warning: Removed 4 rows containing missing values (geom_path).
autoplot(cvx.ts) +
autolayer(ma.data7, series = "MA-7")
## Warning: Removed 6 rows containing missing values (geom_path).
ma.data20 <- ma(cvx.ts,20)
autoplot(cvx.ts) +
autolayer(ma.data20, series = "MA-20")
## Warning: Removed 20 rows containing missing values (geom_path).
ma.data30 <- ma(cvx.ts,30)
autoplot(cvx.ts) +
autolayer(ma.data30, series = "MA-30")
## Warning: Removed 30 rows containing missing values (geom_path).
ses.cvx <- ses(cvx.ts, h = 253)
autoplot(cvx.ts)+
autolayer(ses.cvx, series = "Pred")
accuracy(ses.cvx) #1.307388
## ME RMSE MAE MPE MAPE
## Training set 0.001961413 1.307388 0.9600614 -0.006857314 0.9148865
## MASE ACF1
## Training set 0.06645329 0.009543432
holt.cvx <- holt(cvx.ts, h = 253)
autoplot(cvx.ts)+
autolayer(holt.cvx, series = "Pred")
accuracy(holt.cvx) #1.307451
## ME RMSE MAE MPE MAPE
## Training set 0.0003497412 1.307451 0.9600609 -0.008189564 0.9148942
## MASE ACF1
## Training set 0.06645325 0.00953449
ets.cvx <- ets(cvx.ts, model = "ZZN")
autoplot(cvx.ts)+
autolayer(forecast(ets.cvx, h = 253), series = "Pred")
accuracy(ets.cvx) #1.307388
## ME RMSE MAE MPE MAPE
## Training set 0.001961413 1.307388 0.9600614 -0.006857314 0.9148865
## MASE ACF1
## Training set 0.06645329 0.009543432
ets.cvx.damp <- ets(cvx.ts, model = "ZZN", damped = TRUE)
autoplot(cvx.ts)+
autolayer(forecast(ets.cvx.damp, h = 253), series = "Pred")
accuracy(ets.cvx.damp) #1.307414
## ME RMSE MAE MPE MAPE
## Training set 0.0008808311 1.307414 0.9602617 -0.007769016 0.9150651
## MASE ACF1
## Training set 0.06646716 0.009218973
ets.cvx.box <- ets(cvx.ts, model = "ZZN", lambda = "auto")
autoplot(cvx.ts)+
autolayer(forecast(ets.cvx.box, h = 253), series = "Pred")
accuracy(ets.cvx.box) #1.307475
## ME RMSE MAE MPE MAPE
## Training set 0.00153528 1.307475 0.9604866 -0.007228209 0.9152564
## MASE ACF1
## Training set 0.06648272 0.009354912
ets.cvx.box.damp <- ets(cvx.ts, model = "ZZN", lambda = "auto", damped = TRUE)
autoplot(cvx.ts)+
autolayer(forecast(ets.cvx.box.damp, h = 253), series = "Pred")
accuracy(ets.cvx.box.damp) #1.307926
## ME RMSE MAE MPE MAPE
## Training set 0.00134101 1.307926 0.9597846 -0.006836558 0.9147939
## MASE ACF1
## Training set 0.06643413 -0.003877792
stl.cvx <- stlf(cvx.ts, h = 253)
autoplot(cvx.ts)+
autolayer(forecast(stl.cvx, h = 253), series = "Pred")
accuracy(stl.cvx) #1.148726
## ME RMSE MAE MPE MAPE
## Training set 0.001406219 1.148726 0.862867 -0.005127043 0.815666
## MASE ACF1
## Training set 0.05972571 0.003100701
stl.cvx.box <- stlf(cvx.ts, h = 253, lambda ="auto")
autoplot(cvx.ts)+
autolayer(forecast(stl.cvx.box, h = 253), series = "Pred")
accuracy(stl.cvx.box) #1.139189
## ME RMSE MAE MPE MAPE
## Training set 0.001550476 1.139189 0.8496403 -0.004391602 0.8062989
## MASE ACF1
## Training set 0.05881019 0.01098219
#This model has the best RMSE
stl.cvx.box.rob <- stlf(cvx.ts, h = 253, lambda ="auto", robust = TRUE)
autoplot(cvx.ts)+
autolayer(forecast(stl.cvx.box.rob, h = 253), series = "Pred")
accuracy(stl.cvx.box.rob) #1.647481
## ME RMSE MAE MPE MAPE
## Training set 0.005343148 1.647481 1.083017 -0.002915733 1.036786
## MASE ACF1
## Training set 0.07496399 -0.03212193
checkresiduals(stl.cvx)
## Warning in checkresiduals(stl.cvx): The fitted degrees of freedom is based
## on the model used for the seasonally adjusted data.
##
## Ljung-Box test
##
## data: Residuals from STL + ETS(A,N,N)
## Q* = 303.51, df = 249.8, p-value = 0.01133
##
## Model df: 2. Total lags used: 251.8
The best model for each company's stock
#1 JP Morgan
autoplot(jp.ts)
stl.jp <- stlf(jp.ts, h = 253)
autoplot(jp.ts)+
autolayer(forecast(stl.jp, h = 253), series = "Pred")
accuracy(stl.jp) #0.7836143
## ME RMSE MAE MPE MAPE
## Training set 0.006239975 0.7836143 0.5711303 -0.00882056 0.862666
## MASE ACF1
## Training set 0.05024296 0.01849409
#2 Berkshire Hathaway
autoplot(brk.ts)
stl.brk <- stlf(brk.ts, h = 253)
autoplot(brk.ts)+
autolayer(forecast(stl.brk, h = 253), series = "Pred")
accuracy(stl.brk) #1.200676
## ME RMSE MAE MPE MAPE
## Training set 0.0004600848 1.200676 0.8547287 -0.006059034 0.6081157
## MASE ACF1
## Training set 0.0414467 0.02367755
#3 Bank of America
autoplot(bac.ts)
stl.bac.box <- stlf(bac.ts, h = 253, lambda ="auto")
autoplot(bac.ts)+
autolayer(forecast(stl.bac.box, h = 253), series = "Pred")
accuracy(stl.bac.box) #0.2593555
## ME RMSE MAE MPE MAPE
## Training set 0.0006780477 0.2593555 0.1875714 -0.01970162 1.083757
## MASE ACF1
## Training set 0.04670239 0.006865843
#4 Wells Fargo
autoplot(wfc.ts)
stl.wfc <- stlf(wfc.ts, h = 253)
autoplot(wfc.ts)+
autolayer(forecast(stl.wfc, h = 253), series = "Pred")
accuracy(stl.wfc) #0.5622515
## ME RMSE MAE MPE MAPE
## Training set 0.01766315 0.5622515 0.3971173 0.03245101 0.7896173
## MASE ACF1
## Training set 0.05958405 0.01053205
#5 Apple
autoplot(aapl.ts)
stl.aapl <- stlf(aapl.ts, h = 253)
autoplot(aapl.ts)+
autolayer(forecast(stl.aapl, h = 253), series = "Pred")
accuracy(stl.aapl) #1.524812
## ME RMSE MAE MPE MAPE MASE
## Training set 0.08787856 1.524812 1.086323 0.06762393 1.048568 0.03588327
## ACF1
## Training set -0.003333371
#6 Exxon Mobil
autoplot(xom.ts)
stl.xom.box <- stlf(xom.ts, h = 253, lambda ="auto")
autoplot(xom.ts)+
autolayer(forecast(stl.xom.box, h = 253), series = "Pred")
accuracy(stl.xom.box) #0.7922227
## ME RMSE MAE MPE MAPE
## Training set -0.007389485 0.7922227 0.6102358 -0.01311804 0.7051523
## MASE ACF1
## Training set 0.06755975 -0.0006928527
#This model has the best RMSE
#7 AT&T
autoplot(at.ts)
stl.at <- stlf(at.ts, h = 253)
autoplot(at.ts)+
autolayer(forecast(stl.at, h = 253), series = "Pred")
accuracy(stl.at) #0.3136154
## ME RMSE MAE MPE MAPE
## Training set 0.0008430593 0.3136154 0.2377149 -0.00175276 0.6567895
## MASE ACF1
## Training set 0.08090683 0.0004429256
#8 Verizon
autoplot(vz.ts)
stl.vz <- stlf(vz.ts, h = 253)
autoplot(vz.ts)+
autolayer(forecast(stl.vz, h = 253), series = "Pred")
accuracy(stl.vz) #0.446133
## ME RMSE MAE MPE MAPE
## Training set 0.003923409 0.446133 0.3372041 0.003990434 0.6903078
## MASE ACF1
## Training set 0.1024818 0.006692068
#9 Microsoft
autoplot(mfst.ts)
stl.mfst <- stlf(mfst.ts, h = 253)
autoplot(mfst.ts)+
autolayer(forecast(stl.mfst, h = 253), series = "Pred")
accuracy(stl.mfst) #0.6310272
## ME RMSE MAE MPE MAPE
## Training set 0.05121482 0.6310272 0.4366059 0.08697857 0.8965748
## MASE ACF1
## Training set 0.04146474 -0.004317787
#10 Chevron
autoplot(cvx.ts)
stl.cvx.box <- stlf(cvx.ts, h = 253, lambda ="auto")
autoplot(cvx.ts)+
autolayer(forecast(stl.cvx.box, h = 253), series = "Pred")
accuracy(stl.cvx.box) #1.139189
## ME RMSE MAE MPE MAPE
## Training set 0.001550476 1.139189 0.8496403 -0.004391602 0.8062989
## MASE ACF1
## Training set 0.05881019 0.01098219
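To close, a short sketch (not part of the original write-up) that tabulates each chosen model's training RMSE in one place, reusing the fits above:
best <- list(JPM = stl.jp, BRK.B = stl.brk, BAC = stl.bac.box, WFC = stl.wfc,
             AAPL = stl.aapl, XOM = stl.xom.box, T = stl.at, VZ = stl.vz,
             MSFT = stl.mfst, CVX = stl.cvx.box)
data.frame(RMSE = sapply(best, function(m) accuracy(m)["Training set", "RMSE"]))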