rm(list = ls())
library(easypackages)
checkForPack = c("knitr","car","jsonlite","lattice","stringr","sp","tidyr","forecast", "dplyr",
"readr","expsmooth","TSA","dynlm","Hmisc","car","AER","tseries","lmtest",
"FitAR","fUnitRoots","FSAdata", "kableExtra", "truncnorm", "rugarch", "lubridate")
checkNInst = checkForPack %in% rownames(installed.packages())
if ( any(!checkNInst) ) { install.packages( checkForPack[!checkNInst] ) }
libraries(checkForPack)
source('C:/Arjun/RMIT/7. Time Series/Project/ts-bitcoin/residual.analysis.R')
source('C:/Arjun/RMIT/7. Time Series/Project/ts-bitcoin/sort.score.R')
source('C:/Arjun/RMIT/7. Time Series/Project/ts-bitcoin/MASE.R')
load("C:\\Arjun\\RMIT\\7. Time Series\\Project\\ts-bitcoin\\Report_10_06_19.RData")
#source('C:/Arjun/RMIT/7. Time Series/Functions/residual.analysis.R')
set.seed(738)
Bitcoin is a decentralized currency that is borderless, secure and enables unaltered transactions at low cost. From its bull-market surge to its subsequent loss in value, assessing the volatility of Bitcoin prices covers everything a buyer needs to know. We track the daily closing price of Bitcoin from the 27th of April 2013 to the 24th of February 2019. Source: coinmarketcap.com.
Our task is to analyse the data using the methods covered in the MATH1318 Time Series Analysis course this semester and to fit the model that most accurately predicts the value of Bitcoin for the next 10 days, i.e. the model with the minimum MASE.
forecasting_period = 10
testSet = c(3882.7, 3854.36, 3851.05, 3854.79, 3859.58, 3864.42, 3847.18, 3761.56, 3896.38, 3903.94)
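MASE (mean absolute scaled error) scales the mean absolute forecast error by the mean absolute error of the one-step naive forecast. The report sources its own MASE.R; the sketch below is a minimal, assumed two-argument version shown only to illustrate the idea, not the sourced implementation.
# Minimal MASE sketch (illustrative only; the sourced MASE.R used in this
# report is assumed to have a similar, but not necessarily identical, interface).
mase_simple = function(actual, predicted) {
  # scale: mean absolute error of the one-step naive (random walk) forecast
  naive_scale = mean(abs(diff(actual)))
  mean(abs(actual - predicted)) / naive_scale
}
# e.g. mase_simple(testSet, rep(mean(testSet), forecasting_period))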
bitCoin_csv = read.csv("bitCoin_Historical_Price.csv")
bitCoin_csv$Close = as.numeric(as.character(gsub(",","",bitCoin_csv$Close))) # Will convert factor to a numeric value by stripping the commas to avoid NA's
bitCoin_ts = ts(as.numeric(as.vector(bitCoin_csv$Close)), start=c(2013, 117), frequency = 365)
any(!is.finite(bitCoin_ts)) # checking for NA's
## [1] FALSE
Bitcoin closing prices from 27th April 2013 to 24th February 2019.
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
plot(bitCoin_ts,
#axes = FALSE,
ylab='Closing Price in $',
xlab='Year',
type='l',
col = c("#003366"),
lwd=2,
main = "Time Series Plot of BitCoin Closing Prices (in $)")
legend("topleft",lty=1, bty = "n" ,text.width = 15, col=c("#003366"),lwd=1.5,
c("Closing Price in $"))
grid()
Fig 1.: Time Series Plot of BitCoin Closing Prices
From the above time series plot (Fig 1.) we can comment on the fundamental features of the series, most notably the strong upward trend and the changing variance, which are examined below.
bitCoin_split = bitCoin_csv
bitCoin_split$Date <- as.Date(bitCoin_split$Date, format = "%d/%m/%y")
class(bitCoin_split$Date)
## [1] "Date"
bitCoin_split_17 = bitCoin_split[bitCoin_split$Date >= as.Date("2017-04-01"),]
bitcoints = ts(bitCoin_split_17$Close,
start = c(2017, as.numeric(format(as.Date("2017-04-01"), "%j"))),
frequency = 365)
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
plot(bitcoints,
# axes = FALSE,
ylab='Closing Price in $',
xlab='Year',
type='l',
col = c("#003366"),
lwd=2,
main = "Time Series Plot of BitCoin Closing Prices after 2017 split (in $)")
legend("topleft",lty=1, bty = "n" ,text.width = 15, col=c("#003366"),lwd=2,
c("Closing Price in $ after split"))
grid()
Fig 2.: Time Series Plot of BitCoin Closing Prices after split
From the above time series plot (Fig 2.) the effects of trend and changing variance are clearly visible. It may be worth considering this split series for modelling.
par(mar=c(5,4,4.5,1),mfrow=c(1,2),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
acf(bitCoin_ts, main = "ACF plot for BitCoin Prices")
pacf(bitCoin_ts, main = "PACF plot for BitCoin Prices")
Fig 3.: ACF and PACF Plots of BitCoin Closing Prices
par(mar=c(5,4,4.5,1),mfrow=c(1,2),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
acf(bitcoints, main = "ACF plot for BitCoin Prices post split")
pacf(bitcoints, main = "PACF plot for BitCoin Prices post split")
Fig 4.: ACF and PACF Plots of BitCoin Closing Prices post split
#adf_test_model <- adf.test(bitcoin_ts)
The above (Fig 3.) shows the ACF and PACF plots for the original series. There is a strong, slowly decaying pattern in the ACF, which indicates that the series has an autoregressive behaviour; this is expected, as the time series plot (Fig 1.) showed strong dependence between successive data points. The ACF and PACF plots of the split series (Fig 4.) show the same AR behaviour. However, since the ACF shows a strong declining trend, which is evidence of non-stationarity, we must first deal with the trend and changing variance before we comment further on the behaviour of the series.
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
y = bitCoin_ts
x = zlag(bitCoin_ts)
index = 2:length(x)
cor = cor(y[index],x[index])
plot(y=bitCoin_ts,
x=zlag(bitCoin_ts),
ylab='Closing Price in $ after split',
xlab='Previous Year Closing Price in $',
main = "Relationship between successive data points",
col = c("#003366"))
legend("topleft",lty=3, bty = "n" ,text.width = 15, col=c("#003366"),lwd=4,
c(paste0("Correlation: ", round(cor,4))))
grid()
Fig 5.: Relationship between successive data points
From (Fig 5.), we see that the time series has a strong autoregressive behaviour, with a high correlation of 0.9976 between successive data points.
Considering (Fig 3.), (Fig 4.) and (Fig 5.), we see a strong trend component in the series. We can therefore start with a basic linear trend model to see how well it fits. Further, we can also try higher-order ARMA models with AR orders p = 1, 2, 3, 4, 5 and q = 0, purely based on (Fig 3.) and (Fig 4.).
lmModel = lm(bitCoin_ts~ time(bitCoin_ts))
summary(lmModel)
##
## Call:
## lm(formula = bitCoin_ts ~ time(bitCoin_ts))
##
## Residuals:
## Min 1Q Median 3Q Max
## -2907.7 -1865.4 -371.2 1212.0 14773.3
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -2.778e+06 6.383e+04 -43.52 <2e-16 ***
## time(bitCoin_ts) 1.379e+03 3.166e+01 43.56 <2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 2461 on 2128 degrees of freedom
## Multiple R-squared: 0.4714, Adjusted R-squared: 0.4711
## F-statistic: 1898 on 1 and 2128 DF, p-value: < 2.2e-16
The linear trend model is significant, with a p-value close to 0, and both the intercept and slope coefficients are significant. The residual standard error is large but can be analysed further in the residual analysis. However, the R-squared values are quite low, which raises the question of whether the model is a good fit for this data set.
Let us plot the fitted model against the series.
par(mar=c(5,4,4,3),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
plot(bitCoin_ts,
ylab='Closing Price in $ after split',
xlab='Year',
type='l',
col = c("#003366"),
lwd = 2,
main = "Fitted Linear Curve to the BitCoin Closing Price Series")
#lines(as.vector(fitted(lmModel)), col="black", lwd = 1)
abline(lmModel, col = "red", lwd = 2)
legend("topleft",lty=1, text.width = 20, col=c("#003366","red"), bty = "n",
c("BitCoin Series", "Fitted Linear Curve"))Fig 6.: Fitted Curves to the BitCoin Closing Proce Series
(Fig 6.) confirms visually that the linear trend model is not a good fit for the time series data. We can further perform a residual check to confirm this.
residual.analysis(lmModel)
##
## Shapiro-Wilk normality test
##
## data: res.model
## W = 0.82362, p-value < 2.2e-16
## Warning in (ra^2)/(n - (1:lag.max)): longer object length is not a multiple
## of shorter object length
Fig 7.: Residual Analysis of Linear Trend Model
The residual analysis (Fig 7.) confirms that the linear trend model is not a good fit for this series. This is likely due to the autoregressive behaviour seen in the ACF and PACF plots of (Fig 3.) and (Fig 4.), which the residual analysis also confirms. Hence we move on to different models such as ARIMA and ARIMA-GARCH.
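residual.analysis() is a helper function sourced at the top of this report, and its exact contents are not reproduced here. A minimal sketch of the checks it is assumed to perform (standardised residuals over time, histogram, QQ plot, ACF and a Shapiro-Wilk test) is given below.
# Assumed sketch of the sourced residual.analysis() helper (not the original code).
residual.analysis.sketch = function(model) {
  res.model = rstandard(model)   # standardised residuals; TSA supplies a method for Arima fits
  par(mfrow=c(2,2))
  plot(res.model, type='l', ylab='Standardised residuals', main='Residuals over time')
  hist(res.model, main='Histogram of standardised residuals')
  qqnorm(res.model); qqline(res.model, col='red')
  acf(res.model, main='ACF of standardised residuals')
  print(shapiro.test(res.model))
  par(mfrow=c(1,1))
}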
Before we proceed with ARIMA models, the series must be stationary. Let us perform an ADF test to see whether the series passes that requirement.
testAdf = adf.test(bitCoin_ts)
testAdf
##
## Augmented Dickey-Fuller Test
##
## data: bitCoin_ts
## Dickey-Fuller = -2.6913, Lag order = 12, p-value = 0.2856
## alternative hypothesis: stationary
With a p-value of 0.2856 > 0.05, there is not enough statistical evidence to reject H0, so we fail to reject it; hence the series is non-stationary.
As mentioned above, before concluding about the behaviour of the series we must first deal with the trend and then the changing variance.
Since we see changing variance in the series, let us apply a log transformation, i.e. a Box-Cox transformation with lambda = 0.
lambda = 0
bitCoin.log = log(bitCoin_ts)
round(min(bitCoin_ts),2) # checking for any negative values before transforming
## [1] 68.43
bitCoin.BoxCox = BoxCox.ar(bitCoin_ts, seq(-1,1,0.01), method = "ols")
lambda <- (as.numeric(bitCoin.BoxCox$ci[1]) + as.numeric(bitCoin.BoxCox$ci[2])) / 2
lambda
## [1] -0.1
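The midpoint of the Box-Cox confidence interval, -0.1, is close to 0, which supports the choice of the log transformation: with lambda = 0 the Box-Cox transform reduces exactly to the natural log. A quick sanity check (using forecast::BoxCox, assumed to behave this way) is shown below.
# With lambda = 0 the Box-Cox transform is the natural log (sanity check).
all.equal(as.numeric(forecast::BoxCox(bitCoin_ts, lambda = 0)),
          as.numeric(log(bitCoin_ts)))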
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
plot(bitCoin.log,
# axes = FALSE,
ylab='Closing Price in $',
xlab='Year',
type='l',
col = c("#003366"),
lwd=2,
main = "Time Series Plot of Transformed BitCoin Closing Prices (in $)")
legend("topleft",lty=1, bty = "n" ,text.width = 15, col=c("#003366"),lwd=2,
c("log transformed series"))Fig 5.: Time Series Plot of Transformed BitCoin Closing Prices
par(mar=c(5,4,4.5,1),mfrow=c(1,2),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
acf(bitCoin.log, main = "ACF plot for transformed BitCoin Prices")
pacf(bitCoin.log, main = "PACF plot for transformed BitCoin Prices")
Fig 6.: ACF and PACF Plots of log transformed BitCoin Closing Prices
From (Fig 5.) the log transformed series still has the increasing trend, and the ACF and PACF plots (Fig 6.) still show the prevalence of non-stationarity with some changing variance. We can perform the ADF test again to check whether the log transformed series is stationary.
testAdf = adf.test(bitCoin.log)
testAdf
##
## Augmented Dickey-Fuller Test
##
## data: bitCoin.log
## Dickey-Fuller = -1.6272, Lag order = 12, p-value = 0.7362
## alternative hypothesis: stationary
With a p-value of 0.7362 > 0.05, there is not enough statistical evidence to reject H0, so we fail to reject it; hence the series is still non-stationary.
To de-trend the series we will apply first differencing.
diff.bitCoin.log = diff(bitCoin.log, differences = 1)
testAdf = adf.test(diff.bitCoin.log)
## Warning in adf.test(diff.bitCoin.log): p-value smaller than printed p-value
testAdf
##
## Augmented Dickey-Fuller Test
##
## data: diff.bitCoin.log
## Dickey-Fuller = -11.171, Lag order = 12, p-value = 0.01
## alternative hypothesis: stationary
With a p-value of 0.01 < 0.05, there is enough statistical evidence to reject H0 in favour of Ha: the series is stationary. Now that we have a stationary series, let us visually inspect the differenced series and comment on its behaviour.
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
plot(diff.bitCoin.log,
# axes = FALSE,
ylab='Closing Price in $',
xlab='Year',
type='l',
col = c("#003366"),
lwd=1,
main = "Time Series Plot of Transformed BitCoin Closing Prices (in $)")
legend("topleft",lty=1, bty = "n" ,text.width = 15, col=c("#003366"),lwd=1,
c("Log-tranformed Differenced Series"))Fig 7.: Time Series Plot of Differenced Series
From (Fig 7.), we can see that the differenced series no longer has any trend, in contrast to the earlier non-differenced series.
par(mar=c(5,4,4.5,1),mfrow=c(1,2),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
acf(diff.bitCoin.log, main = "ACF plot for differenced series")
pacf(diff.bitCoin.log, main = "PACF plot for differenced series")
Fig 8.: ACF and PACF Plots of Differenced series
Fig 10.: BIC Plot of Differenced series
The BIC plot (Fig 10.) suggests ARIMA(6,1,6) as a candidate model.
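The code producing the BIC plot is not reproduced above; a sketch of how such a plot is typically generated with TSA::armasubsets on the differenced log series (the orders chosen here are an assumption) would be:
# Assumed sketch of the BIC subset-selection plot (Fig 10.) via TSA::armasubsets.
bic_subsets = armasubsets(y = diff.bitCoin.log, nar = 8, nma = 8,
                          y.name = "p", ar.method = "ols")
plot(bic_subsets)  # shaded rows are ordered by BIC; top rows suggest candidate orders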
Function
.GlobalEnv$.__C__Arima
## Virtual Class "Arima" [in ".GlobalEnv"]
##
## Slots:
##
## Name: .S3Class
## Class: character
##
## Extends: "oldClass"
.GlobalEnv$.__C__ARIMA
## Virtual Class "ARIMA" [in ".GlobalEnv"]
##
## Slots:
##
## Name: .S3Class
## Class: character
##
## Extends: "oldClass"
.GlobalEnv$.__C__forecast
## Virtual Class "forecast" [in ".GlobalEnv"]
##
## Slots:
##
## Name: .S3Class
## Class: character
##
## Extends: "oldClass"
#setOldClass(c("Arima", "ARIMA", "forecast"))
# flag = FALSE
forecasting_period = 10
# if (flag == TRUE) max_order = 2 else {
# max_order = 6
# smaller_max_order = 3
# }
arma_garch_model_list = list()
arima_model_list <- list()
#
# first_arima_model_list = list()
# last_arima_model_list = list()
# first_arma_garch_model_list = list()
# last_arma_garch_model_list = list()
setClass(Class="ArimaClass",
representation(
key="character",
model_name="character",
ar="numeric",
ma="numeric",
d="numeric",
est_method="character",
aic="numeric",
bic="numeric",
fitted_mase="numeric",
forc_mase="numeric",
fitted_val="numeric",
forc_val="numeric",
model_forc="forecast",
model_fit="ARIMA",
model="Arima"
)
)
setClass(Class="ARMA_GARCH_Models",
representation(
key="character",
model_name="character",
mean_flag="logical",
ar="numeric",
ma="numeric",
alpha_order="numeric",
beta_order="numeric",
d="numeric",
est_method = "character",
aic="numeric",
bic="numeric",
fitted_mase="numeric",
forc_mase="numeric",
fitted_val="numeric",
forc_val = "numeric",
model_forc="uGARCHforecast",
model_fit= "uGARCHfit"
)
)
Execution
arima_model_list = list()
for (nar in 1:8) {
for (nma in 1:8) {
arima_model_list <- generateARIMAModels(diff.bitCoin.log,
ar=nar, d=1, ma=nma,
arima_model_list, lambda,testSet,"Modelling")
}
}## [1] "Processing :ARIMA(1 , 1 , 1) - ML"
## [1] "Processing :ARIMA(1 , 1 , 1) - CSS"
## [1] "Processing :ARIMA(1 , 1 , 2) - ML"
## [1] "Processing :ARIMA(1 , 1 , 2) - CSS"
## [1] "Processing :ARIMA(1 , 1 , 3) - ML"
## [1] "Processing :ARIMA(1 , 1 , 3) - CSS"
## [1] "Processing :ARIMA(1 , 1 , 4) - ML"
## [1] "Processing :ARIMA(1 , 1 , 4) - CSS"
## [1] "Processing :ARIMA(1 , 1 , 5) - ML"
## [1] "Processing :ARIMA(1 , 1 , 5) - CSS"
## [1] "Processing :ARIMA(1 , 1 , 6) - ML"
## [1] "Processing :ARIMA(1 , 1 , 6) - CSS"
## [1] "Processing :ARIMA(1 , 1 , 7) - ML"
## [1] "Processing :ARIMA(1 , 1 , 7) - CSS"
## [1] "Processing :ARIMA(1 , 1 , 8) - ML"
## [1] "Processing :ARIMA(1 , 1 , 8) - CSS"
## [1] "Processing :ARIMA(2 , 1 , 1) - ML"
## [1] "Processing :ARIMA(2 , 1 , 1) - CSS"
## [1] "Processing :ARIMA(2 , 1 , 2) - ML"
## [1] "Processing :ARIMA(2 , 1 , 2) - CSS"
## [1] "Processing :ARIMA(2 , 1 , 3) - ML"
## [1] "Processing :ARIMA(2 , 1 , 3) - CSS"
## [1] "Processing :ARIMA(2 , 1 , 4) - ML"
## [1] "Processing :ARIMA(2 , 1 , 4) - CSS"
## [1] "Processing :ARIMA(2 , 1 , 5) - ML"
## [1] "Processing :ARIMA(2 , 1 , 5) - CSS"
## [1] "Processing :ARIMA(2 , 1 , 6) - ML"
## [1] "Processing :ARIMA(2 , 1 , 6) - CSS"
## [1] "Processing :ARIMA(2 , 1 , 7) - ML"
## [1] "Processing :ARIMA(2 , 1 , 7) - CSS"
## [1] "Processing :ARIMA(2 , 1 , 8) - ML"
## [1] "Processing :ARIMA(2 , 1 , 8) - CSS"
## [1] "Processing :ARIMA(3 , 1 , 1) - ML"
## [1] "Processing :ARIMA(3 , 1 , 1) - CSS"
## [1] "Processing :ARIMA(3 , 1 , 2) - ML"
## [1] "Processing :ARIMA(3 , 1 , 2) - CSS"
## [1] "Processing :ARIMA(3 , 1 , 3) - ML"
## [1] "Processing :ARIMA(3 , 1 , 3) - CSS"
## [1] "Processing :ARIMA(3 , 1 , 4) - ML"
## [1] "Processing :ARIMA(3 , 1 , 4) - CSS"
## [1] "Processing :ARIMA(3 , 1 , 5) - ML"
## [1] "Processing :ARIMA(3 , 1 , 5) - CSS"
## [1] "Processing :ARIMA(3 , 1 , 6) - ML"
## [1] "Processing :ARIMA(3 , 1 , 6) - CSS"
## [1] "Processing :ARIMA(3 , 1 , 7) - ML"
## [1] "Processing :ARIMA(3 , 1 , 7) - CSS"
## [1] "Processing :ARIMA(3 , 1 , 8) - ML"
## [1] "Processing :ARIMA(3 , 1 , 8) - CSS"
## [1] "Processing :ARIMA(4 , 1 , 1) - ML"
## [1] "Processing :ARIMA(4 , 1 , 1) - CSS"
## [1] "Processing :ARIMA(4 , 1 , 2) - ML"
## [1] "Processing :ARIMA(4 , 1 , 2) - CSS"
## [1] "Processing :ARIMA(4 , 1 , 3) - ML"
## [1] "Processing :ARIMA(4 , 1 , 3) - CSS"
## [1] "Processing :ARIMA(4 , 1 , 4) - ML"
## [1] "Processing :ARIMA(4 , 1 , 4) - CSS"
## [1] "Processing :ARIMA(4 , 1 , 5) - ML"
## [1] "Processing :ARIMA(4 , 1 , 5) - CSS"
## [1] "Processing :ARIMA(4 , 1 , 6) - ML"
## [1] "Processing :ARIMA(4 , 1 , 6) - CSS"
## [1] "Processing :ARIMA(4 , 1 , 7) - ML"
## [1] "Processing :ARIMA(4 , 1 , 7) - CSS"
## [1] "Processing :ARIMA(4 , 1 , 8) - ML"
## [1] "Processing :ARIMA(4 , 1 , 8) - CSS"
## [1] "Processing :ARIMA(5 , 1 , 1) - ML"
## [1] "Processing :ARIMA(5 , 1 , 1) - CSS"
## [1] "Processing :ARIMA(5 , 1 , 2) - ML"
## [1] "Processing :ARIMA(5 , 1 , 2) - CSS"
## [1] "Processing :ARIMA(5 , 1 , 3) - ML"
## [1] "Processing :ARIMA(5 , 1 , 3) - CSS"
## [1] "Processing :ARIMA(5 , 1 , 4) - ML"
## [1] "Processing :ARIMA(5 , 1 , 4) - CSS"
## [1] "Processing :ARIMA(5 , 1 , 5) - ML"
## [1] "Processing :ARIMA(5 , 1 , 5) - CSS"
## [1] "Processing :ARIMA(5 , 1 , 6) - ML"
## [1] "Processing :ARIMA(5 , 1 , 6) - CSS"
## [1] "Processing :ARIMA(5 , 1 , 7) - ML"
## [1] "Processing :ARIMA(5 , 1 , 7) - CSS"
## [1] "Processing :ARIMA(5 , 1 , 8) - ML"
## [1] "Processing :ARIMA(5 , 1 , 8) - CSS"
## [1] "Processing :ARIMA(6 , 1 , 1) - ML"
## [1] "Processing :ARIMA(6 , 1 , 1) - CSS"
## [1] "Processing :ARIMA(6 , 1 , 2) - ML"
## [1] "Processing :ARIMA(6 , 1 , 2) - CSS"
## [1] "Processing :ARIMA(6 , 1 , 3) - ML"
## [1] "Processing :ARIMA(6 , 1 , 3) - CSS"
## [1] "Processing :ARIMA(6 , 1 , 4) - ML"
## [1] "Processing :ARIMA(6 , 1 , 4) - CSS"
## [1] "Processing :ARIMA(6 , 1 , 5) - ML"
## [1] "Processing :ARIMA(6 , 1 , 5) - CSS"
## [1] "Processing :ARIMA(6 , 1 , 6) - ML"
## [1] "Processing :ARIMA(6 , 1 , 6) - CSS"
## [1] "Processing :ARIMA(6 , 1 , 7) - ML"
## [1] "Processing :ARIMA(6 , 1 , 7) - CSS"
## [1] "Processing :ARIMA(6 , 1 , 8) - ML"
## [1] "Processing :ARIMA(6 , 1 , 8) - CSS"
## [1] "Processing :ARIMA(7 , 1 , 1) - ML"
## [1] "Processing :ARIMA(7 , 1 , 1) - CSS"
## [1] "Processing :ARIMA(7 , 1 , 2) - ML"
## [1] "Processing :ARIMA(7 , 1 , 2) - CSS"
## [1] "Processing :ARIMA(7 , 1 , 3) - ML"
## [1] "Processing :ARIMA(7 , 1 , 3) - CSS"
## [1] "Processing :ARIMA(7 , 1 , 4) - ML"
## [1] "Processing :ARIMA(7 , 1 , 4) - CSS"
## [1] "Processing :ARIMA(7 , 1 , 5) - ML"
## [1] "Processing :ARIMA(7 , 1 , 5) - CSS"
## [1] "Processing :ARIMA(7 , 1 , 6) - ML"
## [1] "Processing :ARIMA(7 , 1 , 6) - CSS"
## [1] "Processing :ARIMA(7 , 1 , 7) - ML"
## [1] "Processing :ARIMA(7 , 1 , 7) - CSS"
## [1] "Processing :ARIMA(7 , 1 , 8) - ML"
## [1] "Processing :ARIMA(7 , 1 , 8) - CSS"
## [1] "Processing :ARIMA(8 , 1 , 1) - ML"
## [1] "Processing :ARIMA(8 , 1 , 1) - CSS"
## [1] "Processing :ARIMA(8 , 1 , 2) - ML"
## [1] "Processing :ARIMA(8 , 1 , 2) - CSS"
## [1] "Processing :ARIMA(8 , 1 , 3) - ML"
## [1] "Processing :ARIMA(8 , 1 , 3) - CSS"
## [1] "Processing :ARIMA(8 , 1 , 4) - ML"
## [1] "Processing :ARIMA(8 , 1 , 4) - CSS"
## [1] "Processing :ARIMA(8 , 1 , 5) - ML"
## [1] "Processing :ARIMA(8 , 1 , 5) - CSS"
## [1] "Processing :ARIMA(8 , 1 , 6) - ML"
## [1] "Processing :ARIMA(8 , 1 , 6) - CSS"
## [1] "Processing :ARIMA(8 , 1 , 7) - ML"
## [1] "Processing :ARIMA(8 , 1 , 7) - CSS"
## [1] "Processing :ARIMA(8 , 1 , 8) - ML"
## [1] "Processing :ARIMA(8 , 1 , 8) - CSS"
results_df = data.frame(model_name=character(),
ar_order=numeric(), d_order=numeric(),ma_order=numeric(),
alpha_order = numeric(), beta_order = numeric(),
est_method = character(), mase_forecast = numeric(), mase_fitted = numeric(), aic = numeric(),
bic = numeric())
names_vec = colnames(results_df)
names(results_df) <- names_vec
len <- length(arima_model_list)
for(il in 1:len) {
index <- (gregexpr(pattern ='\\(', arima_model_list[[il]]@key)[[1]][1]) -1
temp <- data.frame(substring(text = arima_model_list[[il]]@key,1, index),
arima_model_list[[il]]@ar,
arima_model_list[[il]]@d,
arima_model_list[[il]]@ma,
NA,
NA,
arima_model_list[[il]]@est_method,
arima_model_list[[il]]@forc_mase,
arima_model_list[[il]]@fitted_mase,
arima_model_list[[il]]@aic,
arima_model_list[[il]]@bic)
names(temp)<-names_vec
results_df <- rbind(results_df,temp)
}
newDf <- arrange(results_df, mase_forecast, mase_fitted)
newDf <- subset(newDf, select = c(-alpha_order, -beta_order))
kable(newDf[1:5,], caption = "Best MASE from the ARIMA models - Top 5") %>%
kable_styling("striped", full_width = F, font_size = 7, position = "center") %>%
column_spec(c(6,7), bold = T, color = "white", background = "#ed9121") #"#FF4040")
| model_name | ar_order | d_order | ma_order | est_method | mase_forecast | mase_fitted | aic | bic |
|---|---|---|---|---|---|---|---|---|
| ARIMA | 1 | 1 | 1 | CSS | 0.9169127 | 5.820370 | -7289.603 | -7272.613 |
| ARIMA | 5 | 1 | 2 | CSS | 2.1153031 | 5.739614 | -7292.512 | -7247.204 |
| ARIMA | 2 | 1 | 8 | CSS | 2.2483379 | 4.843105 | -7288.489 | -7226.191 |
| ARIMA | 2 | 1 | 1 | CSS | 2.4957636 | 3.125585 | -7288.641 | -7265.987 |
| ARIMA | 2 | 1 | 3 | CSS | 2.6213102 | 4.869749 | -7284.240 | -7250.259 |
print("Results for ARIMA(1,1,1)")## [1] "Results for ARIMA(1,1,1)"
model_111_css = arima(diff.bitCoin.log,order=c(1,1,1),method='CSS')
coeftest(model_111_css)
##
## z test of coefficients:
##
## Estimate Std. Error z value Pr(>|z|)
## ar1 -0.0287102 0.0214228 -1.3402 0.1802
## ma1 -0.9746954 0.0054057 -180.3085 <2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
print("Results for ARIMA(5,1,2)")## [1] "Results for ARIMA(5,1,2)"
model_512_css = arima(diff.bitCoin.log,order=c(5,1,2),method='CSS')
coeftest(model_512_css)
##
## z test of coefficients:
##
## Estimate Std. Error z value Pr(>|z|)
## ar1 -0.7733735 0.0710130 -10.8906 < 2.2e-16 ***
## ar2 -0.0889116 0.0262470 -3.3875 0.0007053 ***
## ar3 -0.0800373 0.0257986 -3.1024 0.0019196 **
## ar4 -0.0276754 0.0279020 -0.9919 0.3212562
## ar5 -0.0065301 0.0227844 -0.2866 0.7744164
## ma1 -0.2344971 0.0677574 -3.4608 0.0005385 ***
## ma2 -0.7119915 0.0724533 -9.8269 < 2.2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
print("Results for ARIMA(2,1,8)")## [1] "Results for ARIMA(2,1,8)"
model_218_css = arima(diff.bitCoin.log,order=c(2,1,8),method='CSS')
coeftest(model_218_css)
##
## z test of coefficients:
##
## Estimate Std. Error z value Pr(>|z|)
## ar1 -1.162404 0.079720 -14.5810 < 2.2e-16 ***
## ar2 -0.665243 0.102700 -6.4775 9.325e-11 ***
## ma1 0.179438 0.080442 2.2307 0.02570 *
## ma2 -0.490501 0.092863 -5.2820 1.278e-07 ***
## ma3 -0.641776 0.105011 -6.1115 9.871e-10 ***
## ma4 0.041857 0.028625 1.4623 0.14367
## ma5 0.055687 0.027447 2.0288 0.04247 *
## ma6 0.030577 0.023817 1.2838 0.19921
## ma7 -0.039534 0.023047 -1.7154 0.08627 .
## ma8 -0.039701 0.021779 -1.8229 0.06832 .
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
print("Results for ARIMA(2,1,1)")## [1] "Results for ARIMA(2,1,1)"
model_211_css = arima(diff.bitCoin.log,order=c(2,1,1),method='CSS')
coeftest(model_211_css)
##
## z test of coefficients:
##
## Estimate Std. Error z value Pr(>|z|)
## ar1 -0.064008 0.023260 -2.7519 0.005926 **
## ar2 -0.097333 0.022157 -4.3929 1.118e-05 ***
## ma1 -0.898976 0.011434 -78.6206 < 2.2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
print("Results for ARIMA(2,1,3)")## [1] "Results for ARIMA(2,1,3)"
model_213_css = arima(diff.bitCoin.log,order=c(2,1,3),method='CSS')
coeftest(model_213_css)
##
## z test of coefficients:
##
## Estimate Std. Error z value Pr(>|z|)
## ar1 -1.180745 0.078714 -15.0005 < 2.2e-16 ***
## ar2 -0.680335 0.086561 -7.8596 3.853e-15 ***
## ma1 0.217337 0.081118 2.6793 0.007379 **
## ma2 -0.473441 0.081981 -5.7750 7.696e-09 ***
## ma3 -0.646717 0.086064 -7.5144 5.718e-14 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
Of the models fitted above, only ARIMA(2,1,1) and ARIMA(2,1,3) have all coefficients significant. Also, from (Fig 11.) above, we notice that ARIMA(2,1,1) has the better MASE of the two.
Hence, considering both the significance of the coefficients and the MASE values, we choose ARIMA(2,1,1) as the best model. However, we still need an overfitting check and residual analysis to confirm this choice.
We will perform the overfitting check by increasing the orders of p and q by 1, keeping the chosen best model, ARIMA(2,1,1), as the base.
print("Results for ARIMA(2,1,2)")## [1] "Results for ARIMA(2,1,2)"
model_212_css = arima(diff.bitCoin.log,order=c(2,1,2),method='CSS')
coeftest(model_212_css)
##
## z test of coefficients:
##
## Estimate Std. Error z value Pr(>|z|)
## ar1 -0.884826 0.059269 -14.9289 < 2e-16 ***
## ar2 -0.043438 0.022417 -1.9377 0.05265 .
## ma1 -0.095207 0.056040 -1.6989 0.08934 .
## ma2 -0.790719 0.054968 -14.3850 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
print("Results for ARIMA(3,1,1)")## [1] "Results for ARIMA(3,1,1)"
model_311_css = arima(diff.bitCoin.log,order=c(3,1,1),method='CSS')
coeftest(model_311_css)
##
## z test of coefficients:
##
## Estimate Std. Error z value Pr(>|z|)
## ar1 -0.018781 0.024891 -0.7545 0.450545
## ar2 -0.056800 0.023262 -2.4417 0.014616 *
## ar3 -0.061427 0.022153 -2.7729 0.005557 **
## ma1 -0.932636 0.011115 -83.9063 < 2.2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
From the above results we can reject the ARIMA(3,1,1) model, as not all of its coefficients are significant. ARIMA(2,1,2) has ar2 and ma1 coefficients that are only marginally significant (at the 90% level), which is weaker than the ARIMA(2,1,1) model, and from (Fig 12.) below we also see that the error measure (MASE) is lower for ARIMA(2,1,1). Hence we keep ARIMA(2,1,1) over ARIMA(2,1,2), but we will perform residual analysis before forecasting.
kable(newDf[ c(4,10),], caption = "Comparing MASE of ARIMA(2,1,1) and ARIMA(2,1,2)") %>%
kable_styling("striped", full_width = F, font_size = 7, position = "left") %>%
column_spec(c(7,8), bold = T, color = "white", background = "#ed9121")
| | model_name | ar_order | d_order | ma_order | est_method | mase_forecast | mase_fitted | aic | bic |
|---|---|---|---|---|---|---|---|---|---|
| 4 | ARIMA | 2 | 1 | 1 | CSS | 2.495764 | 3.125585 | -7288.641 | -7265.987 |
| 10 | ARIMA | 2 | 1 | 2 | CSS | 3.767458 | 3.646530 | -7286.471 | -7258.154 |
residual.analysis(model_211_css)
##
## Shapiro-Wilk normality test
##
## data: res.model
## W = 0.88488, p-value < 2.2e-16
## Warning in (ra^2)/(n - (1:lag.max)): longer object length is not a multiple
## of shorter object length
Fig 13.: Residual Analysis of ARIMA(2,1,1) Model
From (Fig 13.) we see that the standardised residuals are approximately normal, giving a bell-curve shaped histogram. The QQ plot also supports normality of the standardised residuals, with the tails showing some departure that may be due to noise or outliers. The Ljung-Box test still shows evidence of remaining autocorrelation in the residuals.
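For completeness, a Ljung-Box statistic on the ARIMA(2,1,1) residuals can also be computed directly; the lag choice below is illustrative and not part of the original workflow.
# Illustrative Ljung-Box check on the ARIMA(2,1,1) residuals (fitdf = 2 AR + 1 MA).
Box.test(residuals(model_211_css), lag = 10, type = "Ljung-Box", fitdf = 3)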
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
arima_key <- "ARIMA(2 , 1 , 1) - CSS"
plot(bitCoin_ts,
type="l",
lwd = 2,
col = c("#003366"),
main = "BitCoin Closing Price Series",
ylab = "Closing Price in $",
xlab = "Year")
abline(lmModel, col = "red", lwd = 2)
lines(arima_model_list[[arima_key]]@model_fit$fitted, col= c("green"), lwd = 2)
legend("topleft",lty=1, bty = "n" ,text.width = 15, col=c("#003366","red", "green"),
c("Original Series", "Fitted Linear Model", "Fitted ARIMA(2,1,1) Model"))
grid()
Fig 14.: Fitted Curves to the BitCoin Closing Price Series
forecasting_period = 10
fit = Arima(bitcoints, model = model_212_css, lambda=0.5)
bitCoinDataForecast = forecast(fit,h=forecasting_period)
knitr::kable(bitCoinDataForecast,
caption = "10-Step ahead Forecast for Closing Prices of Bitcoin Series") %>%
kable_styling("striped", full_width = F, font_size = 12) %>%
column_spec(2, bold = T, color = "white", background = "#ed9121")
| | Point Forecast | Lo 80 | Hi 80 | Lo 95 | Hi 95 |
|---|---|---|---|---|---|
| 2019.1507 | 3736.225 | 3732.758 | 3739.695 | 3730.922 | 3741.532 |
| 2019.1534 | 3741.423 | 3737.952 | 3744.896 | 3736.116 | 3746.735 |
| 2019.1562 | 3740.032 | 3736.557 | 3743.509 | 3734.718 | 3745.350 |
| 2019.1589 | 3741.037 | 3737.554 | 3744.522 | 3735.711 | 3746.367 |
| 2019.1616 | 3740.208 | 3736.721 | 3743.698 | 3734.875 | 3745.545 |
| 2019.1644 | 3740.898 | 3737.403 | 3744.395 | 3735.553 | 3746.246 |
| 2019.1671 | 3740.324 | 3736.824 | 3743.825 | 3734.972 | 3745.680 |
| 2019.1699 | 3740.802 | 3737.295 | 3744.311 | 3735.439 | 3746.169 |
| 2019.1726 | 3740.404 | 3736.892 | 3743.918 | 3735.033 | 3745.779 |
| 2019.1753 | 3740.735 | 3737.216 | 3744.256 | 3735.354 | 3746.120 |
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1)
plot(bitCoinDataForecast,
type="l",
lwd = 2,
col = c("#003366"),
main = "BitCoin Closing Price Series",
ylab = "Closing Price in $",
xlab = "Year")
# lines(fitted_series,col = c("red"), type="s")
# lines(forecast_series,col = c("green"), type="o")
legend("topleft",lty=1, bty = "n" ,text.width = 15, col=c("#003366", "blue"),
c("Bit coin closing price (in $)", "10 step ahead Forecast"))
grid()
Fig 15.: 10-step ahead Forecast to the BitCoin Closing Price Series
From (Fig 14.) we see that the fitted ARIMA model follows the raw series well; however, it is unable to capture the peak around 2018, which is likely due to the conditional heteroscedasticity of Bitcoin prices. To handle this type of series we combine an ARMA model with a GARCH model.
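The presence of ARCH effects can also be checked formally, for example with the McLeod-Li test from the TSA package; this is an additional illustrative check, not part of the original workflow.
# Illustrative McLeod-Li test for ARCH effects in the ARIMA(2,1,1) residuals;
# small p-values across lags indicate volatility clustering (GARCH effects).
McLeod.Li.test(y = residuals(model_211_css))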
The steps we have followed to fit the ARMA + GARCH model to the series are outlined below.
The reason for step iv. is that we already have a stationary series from the ARIMA modelling, and we are extending the model to handle the GARCH effect.
For this we will loop over the candidate alpha and beta orders, calling the fitting function for each combination, and write the results to a dataframe for further use.
max_order = 6
arma_garch_modelling = function(ts, return_series, log_series, validation_set, model_name, garch_alpha_order, garch_beta_order, ar_order, ma_order, mean_flag, dist_model, arma_garch_model_list) {
garch_order = c(garch_alpha_order,garch_beta_order)
arma_order = c(ar_order,ma_order)
model_spec = ugarchspec(variance.model = list(model = model_name , garchOrder = garch_order),
mean.model = list(armaOrder = arma_order, include.mean = mean_flag),
distribution.model = dist_model)
model_fit = ugarchfit(spec=model_spec,data=return_series)
fitted <- exp(diffinv(as.vector(fitted(model_fit)), differences = 1, xi = as.vector(log_series[1])))
fitted_mase <- MASE(as.vector(ts), as.vector(fitted))[[1]][,1]
forecast_model <- ugarchforecast(model_fit, data = return_series, n.ahead = forecasting_period)
forecast_fitted <- exp(diffinv(as.vector(forecast_model@forecast$seriesFor),
differences = 1, xi = log_series[length(log_series)]))
forecast_mase <- MASE(as.vector(testSet), as.vector(forecast_fitted[2:11]))[[1]][,1]
aic = infocriteria(model_fit)[1,1]
bic = infocriteria(model_fit)[2,1]
if (mean_flag == TRUE) d_order = 1 else d_order = 0
key = paste(ar_order,d_order,ma_order,garch_alpha_order,garch_beta_order,sep = "")
arma_garch_model_list[[key]] = new("ARMA_GARCH_Models",
key=key,
model_name = model_name,
mean_flag = mean_flag,
ar= ar_order,
d = d_order,
ma= ma_order,
alpha_order = garch_alpha_order,
beta_order = garch_beta_order,
est_method = dist_model,
fitted_mase=fitted_mase,
forc_mase=forecast_mase,
aic=aic,
bic=bic,
fitted_val=fitted,
forc_val=forecast_fitted,
model_fit=model_fit,
model_forc=forecast_model)
rm(garch_order,arma_order, model_spec, model_fit, fitted, fitted_mase, forecast_model,forecast_fitted,forecast_mase, aic, bic, key, d_order)
return (arma_garch_model_list)
}
model_211_css.r = model_211_css$residuals
abs.res = abs(model_211_css.r)
sq.res = model_211_css.r^2
par(mar=c(5,4,4.5,1),mfrow=c(1,2),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
acf(abs.res, ci.type = "ma", main = "ACF plot for absolute residual series")
pacf(abs.res, main = "PACF plot for absolute residual series")
Fig 16.: ACF and PACF plots for the absolute residual series
eacf(abs.res)
## AR/MA
## 0 1 2 3 4 5 6 7 8 9 10 11 12 13
## 0 x x x x x x x x x x x x x x
## 1 x x o x x o o o o o o o o o
## 2 x x o o o o o o o o o o x o
## 3 x x x x o o o o o o o o x o
## 4 x x x x o o o o o o o o x o
## 5 x x x x o x o o o o o o o o
## 6 x x x o x x o o o o o o o o
## 7 x x o x x x o x o o o o o o
From the EACF of the absolute residuals (Fig 16. and the table above) we read ARMA(1,2) and ARMA(2,2), and we can also try ARMA(2,3) and ARMA(2,4). These models correspond to the parameter settings [max(1,2),1], [max(2,2),2], [max(2,3),2] and [max(2,4),2]. Note that the ACF and PACF plots show multiple significant autocorrelations and thus point to rather high ARMA orders; since we already have the best ARMA mean-model parameters, we ignore the higher orders suggested by the ACF and PACF plots.
par(mar=c(5,4,4.5,1),mfrow=c(1,2),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
acf(sq.res, ci.type = "ma", main = "ACF plot for squared residual series")
pacf(sq.res, main = "PACF plot for squared residual series")
Fig 17.: ACF and PACF plots for the squared residual series
eacf(sq.res)
## AR/MA
## 0 1 2 3 4 5 6 7 8 9 10 11 12 13
## 0 x x x x x x x x x x x x x x
## 1 x x x x x o o o o x o x x x
## 2 x x x o x o o o o o o o x x
## 3 x x o x o o o o o o o x x o
## 4 x x o x o o o o o o o o x x
## 5 x x x x o x o o o o o o x o
## 6 x x x x x x o o o o o o x o
## 7 x x x x x x o x o o o o x x
From the EACF of the squared residuals (Fig 17. and the table above) we go for higher orders such as ARMA(1,5), ARMA(1,6), ARMA(2,5) and ARMA(2,6). These models correspond to the parameter settings [max(1,5),1], [max(1,6),1], [max(2,5),2] and [max(2,6),2].
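As a concrete illustration of what the search below does for each combination, a single specification, here an ARMA(2,2) mean with an sGARCH(2,5) variance (one of the combinations the loop covers), can be written and fitted by hand:
# Hand-written equivalent of one combination covered by the loop below.
spec_example = ugarchspec(variance.model = list(model = "sGARCH", garchOrder = c(2, 5)),
                          mean.model = list(armaOrder = c(2, 2), include.mean = TRUE),
                          distribution.model = "norm")
fit_example = ugarchfit(spec = spec_example, data = diff.bitCoin.log)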
ar_order_seq = seq(1:2)
ma_order_seq = seq(1:2)
garch_alpha_order_seq = seq (1:max_order)
garch_beta_order_seq = seq ( 1:max_order)
mean_flag_seq = c(TRUE,FALSE)
distribution = c("norm", "std")
for(ar_order in ar_order_seq) {
for (ma_order in ma_order_seq) {
for (garch_alpha_order in garch_alpha_order_seq) {
for (garch_beta_order in garch_beta_order_seq) {
for (mean_flag in mean_flag_seq){
model_name = "sGARCH"
dist_model = distribution
print(paste("ar_order :",ar_order,"ma_order :",ma_order,
"garch_alpha_order:",garch_alpha_order,
"garch_beta_order:", garch_beta_order,
sep=" "))
# try catch start
tryCatch({
# TO-DO: method selection based on original or return series based on best MASE
arma_garch_model_list = arma_garch_modelling(
bitCoin_ts,
diff.bitCoin.log,
bitCoin.log,
testSet, model_name,
garch_alpha_order,garch_beta_order,
ar_order, ma_order,mean_flag,
dist_model, arma_garch_model_list)
},error=function(e){
print(paste(e," for the model ", "ar_order :",ar_order,"ma_order :",ma_order,
"garch_alpha_order:",garch_alpha_order,
"garch_beta_order:", garch_beta_order,
sep=" "))
})
# try catch end
} # end of mean flag seq
}# end of garch_beta_order seq
}# end of garch_alpha_order seq
}# end of ma_order_seq seq
}# end of ar_order_seq seq
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 1"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 2"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 3"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 4"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 5"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 6"
## [1] "ar_order : 1 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 1 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 2 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 3 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 4 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 5 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 1 garch_alpha_order: 6 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 1 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 2 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 3 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 4 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 5 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 1"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 2"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 3"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 4"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 5"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 6"
## [1] "ar_order : 2 ma_order : 2 garch_alpha_order: 6 garch_beta_order: 6"
results_df_new = data.frame(model_name=character(),
ar_order=numeric(), d_order=numeric(),ma_order=numeric(),
alpha_order = numeric(), beta_order = numeric(),
est_method = character(), mase_forecast = numeric(), mase_fitted = numeric(), aic = numeric(),
bic = numeric())
names_vec = colnames(results_df_new)
names(results_df_new) <- names_vec
len <- length(arma_garch_model_list)
for (i in 1:length(arma_garch_model_list)){
model_item = arma_garch_model_list[[i]]
results_df_new =
rbind(results_df_new,
data.frame(model_name = model_item@model_name,
ar_order = model_item@ar,
d_order = model_item@d,
ma_order = model_item@ma,
alpha_order = model_item@alpha_order,
beta_order = model_item@beta_order,
est_method= model_item@est_method,
mase_forecast= model_item@forc_mase,
mase_fitted = model_item@fitted_mase,
aic = model_item@aic,
bic = model_item@bic))
}
results_df_new <- arrange(results_df_new, mase_forecast, mase_fitted)
kable(results_df_new[1:5,], caption = "Best MASE from the ARMA GARCH models") %>%
  kable_styling("striped", full_width = F, font_size = 7, position = "center") %>%
  column_spec(c(8,9), bold = T, color = "white", background = "#ed9121") #"#FF4040")

| model_name | ar_order | d_order | ma_order | alpha_order | beta_order | est_method | mase_forecast | mase_fitted | aic | bic |
|---|---|---|---|---|---|---|---|---|---|---|
| sGARCH | 2 | 1 | 2 | 2 | 5 | norm | 0.9264105 | 20.19503 | -3.760093 | -3.725511 |
| sGARCH | 2 | 1 | 2 | 2 | 5 | std | 0.9264105 | 20.19503 | -3.760093 | -3.725511 |
| sGARCH | 2 | 1 | 2 | 6 | 6 | norm | 1.0132580 | 21.33855 | -3.769148 | -3.721266 |
| sGARCH | 2 | 1 | 2 | 6 | 6 | std | 1.0132580 | 21.33855 | -3.769148 | -3.721266 |
| sGARCH | 2 | 1 | 2 | 5 | 6 | norm | 1.0132603 | 21.33863 | -3.770088 | -3.724866 |
Based on the best MASE values, let us pick the top two models and validate their fits through residual analysis.
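The fitted candidates are stored in arma_garch_model_list under keys built by pasting the orders together (AR, d, MA, GARCH alpha, GARCH beta), so the two best models can be pulled straight from the sorted results table. A minimal sketch, assuming the results_df_new table and the key convention used when the models were stored:
top_keys = unique(paste0(results_df_new$ar_order, results_df_new$d_order,
                         results_df_new$ma_order, results_df_new$alpha_order,
                         results_df_new$beta_order))[1:2]   # e.g. "21225" and "21266"
top_models = arma_garch_model_list[top_keys]
sapply(top_models, function(m) c(mase_forecast = m@forc_mase,
                                 mase_fitted = m@fitted_mase,
                                 aic = m@aic, bic = m@bic))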
garch_key = "21225"
arma_garch_model_list[[garch_key]]@model_fit##
## *---------------------------------*
## * GARCH Model Fit *
## *---------------------------------*
##
## Conditional Variance Dynamics
## -----------------------------------
## GARCH Model : sGARCH(2,5)
## Mean Model : ARFIMA(2,0,2)
## Distribution : norm
##
## Optimal Parameters
## ------------------------------------
## Estimate Std. Error t value Pr(>|t|)
## mu 0.001390 0.000690 2.0134e+00 0.044075
## ar1 1.035858 0.000990 1.0467e+03 0.000000
## ar2 -0.998858 0.000927 -1.0774e+03 0.000000
## ma1 -1.032832 0.001189 -8.6862e+02 0.000000
## ma2 0.997586 0.000154 6.4881e+03 0.000000
## omega 0.000075 0.000016 4.6591e+00 0.000003
## alpha1 0.227755 0.024642 9.2427e+00 0.000000
## alpha2 0.065700 0.019589 3.3538e+00 0.000797
## beta1 0.000001 0.118386 6.0000e-06 0.999995
## beta2 0.112942 0.043810 2.5780e+00 0.009937
## beta3 0.000000 0.067202 3.0000e-06 0.999998
## beta4 0.352884 0.052505 6.7210e+00 0.000000
## beta5 0.222517 0.047893 4.6461e+00 0.000003
##
## Robust Standard Errors:
## Estimate Std. Error t value Pr(>|t|)
## mu 0.001390 0.001011 1.374834 0.169183
## ar1 1.035858 0.001001 1035.055294 0.000000
## ar2 -0.998858 0.001056 -945.614376 0.000000
## ma1 -1.032832 0.001113 -928.081097 0.000000
## ma2 0.997586 0.000105 9466.937315 0.000000
## omega 0.000075 0.000052 1.428901 0.153033
## alpha1 0.227755 0.041839 5.443599 0.000000
## alpha2 0.065700 0.075095 0.874882 0.381638
## beta1 0.000001 0.350219 0.000002 0.999998
## beta2 0.112942 0.344155 0.328173 0.742781
## beta3 0.000000 0.246027 0.000001 0.999999
## beta4 0.352884 0.173653 2.032121 0.042141
## beta5 0.222517 0.221636 1.003972 0.315392
##
## LogLikelihood : 4015.619
##
## Information Criteria
## ------------------------------------
##
## Akaike -3.7601
## Bayes -3.7255
## Shibata -3.7602
## Hannan-Quinn -3.7474
##
## Weighted Ljung-Box Test on Standardized Residuals
## ------------------------------------
## statistic p-value
## Lag[1] 6.238 1.251e-02
## Lag[2*(p+q)+(p+q)-1][11] 20.217 0.000e+00
## Lag[4*(p+q)+(p+q)-1][19] 28.771 2.608e-08
## d.o.f=4
## H0 : No serial correlation
##
## Weighted Ljung-Box Test on Standardized Squared Residuals
## ------------------------------------
## statistic p-value
## Lag[1] 4.847e-05 0.9944
## Lag[2*(p+q)+(p+q)-1][20] 7.592e+00 0.7670
## Lag[4*(p+q)+(p+q)-1][34] 1.262e+01 0.8461
## d.o.f=7
##
## Weighted ARCH LM Tests
## ------------------------------------
## Statistic Shape Scale P-Value
## ARCH Lag[8] 0.01693 0.500 2.000 0.8965
## ARCH Lag[10] 4.54100 1.488 1.815 0.1689
## ARCH Lag[12] 5.12737 2.451 1.700 0.2921
##
## Nyblom stability test
## ------------------------------------
## Joint Statistic: 6.3296
## Individual Statistics:
## mu 0.25153
## ar1 0.16468
## ar2 0.05331
## ma1 0.20568
## ma2 0.14269
## omega 0.11527
## alpha1 0.04857
## alpha2 0.13845
## beta1 0.09126
## beta2 0.08816
## beta3 0.08931
## beta4 0.10273
## beta5 0.08191
##
## Asymptotic Critical Values (10% 5% 1%)
## Joint Statistic: 2.89 3.15 3.69
## Individual Statistic: 0.35 0.47 0.75
##
## Sign Bias Test
## ------------------------------------
## t-value prob sig
## Sign Bias 0.4756 0.6344
## Negative Sign Bias 0.3761 0.7069
## Positive Sign Bias 0.6787 0.4974
## Joint Effect 1.2431 0.7427
##
##
## Adjusted Pearson Goodness-of-Fit Test:
## ------------------------------------
## group statistic p-value(g-1)
## 1 20 317.7 4.576e-56
## 2 30 350.0 9.006e-57
## 3 40 376.8 7.420e-57
## 4 50 398.6 2.743e-56
##
##
## Elapsed time : 0.697582
Under the robust standard errors, the ARMA terms (ar1, ar2, ma1, ma2) and alpha1 remain significant, whereas alpha2, beta5 and several of the other GARCH coefficients are not.
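Rather than reading the printed summary, the robust coefficient matrix can also be extracted from the fitted object to list the significant terms directly. A brief sketch, assuming rugarch's slot layout in which the robust matrix is kept as @fit$robust.matcoef:
fit_2125 = arma_garch_model_list[["21225"]]@model_fit
robust = fit_2125@fit$robust.matcoef        # Estimate, Std. Error, t value, Pr(>|t|)
round(robust[robust[, 4] < 0.05, ], 4)      # coefficients significant at the 5% level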
Fig 19.: Residual check plots
Above (Fig 19.) are the model evaluation plots. The standardised residuals appear roughly normally distributed, but with a thicker left tail, as their limits extend further on the negative side. The QQ plot tells the same story, showing heavy tails. The ACF plots of the standardised and squared standardised residuals have improved, with fewer significant autocorrelation lags.
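The individual diagnostic panels can be reproduced one at a time with rugarch's plot method on the fitted object; a short sketch, assuming the standard rugarch plot selection numbers (8 = empirical density, 9 = QQ-plot, 10 = ACF of standardised residuals, 11 = ACF of squared standardised residuals):
fit_2125 = arma_garch_model_list[["21225"]]@model_fit
for (w in c(8, 9, 10, 11)) plot(fit_2125, which = w)   # density, QQ, ACF, squared ACF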
garch_key = "21266"
arma_garch_model_list[[garch_key]]@model_fit##
## *---------------------------------*
## * GARCH Model Fit *
## *---------------------------------*
##
## Conditional Variance Dynamics
## -----------------------------------
## GARCH Model : sGARCH(6,6)
## Mean Model : ARFIMA(2,0,2)
## Distribution : norm
##
## Optimal Parameters
## ------------------------------------
## Estimate Std. Error t value Pr(>|t|)
## mu 0.001274 0.000661 1.927723 0.053890
## ar1 0.047922 0.004579 10.464522 0.000000
## ar2 -0.933540 0.006154 -151.701789 0.000000
## ma1 -0.055695 0.004721 -11.797128 0.000000
## ma2 0.956894 0.001264 756.776734 0.000000
## omega 0.000086 0.000012 7.053052 0.000000
## alpha1 0.146507 0.021650 6.767199 0.000000
## alpha2 0.044823 0.014300 3.134441 0.001722
## alpha3 0.036970 0.018140 2.038059 0.041544
## alpha4 0.000000 0.016378 0.000008 0.999994
## alpha5 0.165718 0.026436 6.268641 0.000000
## alpha6 0.000000 0.022160 0.000000 1.000000
## beta1 0.000000 0.177711 0.000000 1.000000
## beta2 0.000000 0.144294 0.000000 1.000000
## beta3 0.000000 0.147607 0.000000 1.000000
## beta4 0.087130 0.098045 0.888672 0.374180
## beta5 0.505153 0.042876 11.781756 0.000000
## beta6 0.000000 0.059907 0.000000 1.000000
##
## Robust Standard Errors:
## Estimate Std. Error t value Pr(>|t|)
## mu 0.001274 0.000673 1.892695 0.058398
## ar1 0.047922 0.010289 4.657818 0.000003
## ar2 -0.933540 0.009401 -99.299314 0.000000
## ma1 -0.055695 0.002265 -24.588101 0.000000
## ma2 0.956894 0.001419 674.148995 0.000000
## omega 0.000086 0.000065 1.321036 0.186489
## alpha1 0.146507 0.044264 3.309819 0.000934
## alpha2 0.044823 0.056803 0.789097 0.430056
## alpha3 0.036970 0.042925 0.861266 0.389091
## alpha4 0.000000 0.070243 0.000002 0.999999
## alpha5 0.165718 0.086132 1.923991 0.054356
## alpha6 0.000000 0.145267 0.000000 1.000000
## beta1 0.000000 0.263522 0.000000 1.000000
## beta2 0.000000 0.671060 0.000000 1.000000
## beta3 0.000000 0.690081 0.000000 1.000000
## beta4 0.087130 0.461062 0.188977 0.850111
## beta5 0.505153 0.149161 3.386632 0.000708
## beta6 0.000000 0.366496 0.000000 1.000000
##
## LogLikelihood : 4030.258
##
## Information Criteria
## ------------------------------------
##
## Akaike -3.7691
## Bayes -3.7213
## Shibata -3.7693
## Hannan-Quinn -3.7516
##
## Weighted Ljung-Box Test on Standardized Residuals
## ------------------------------------
## statistic p-value
## Lag[1] 7.76 5.341e-03
## Lag[2*(p+q)+(p+q)-1][11] 16.50 0.000e+00
## Lag[4*(p+q)+(p+q)-1][19] 23.17 1.528e-05
## d.o.f=4
## H0 : No serial correlation
##
## Weighted Ljung-Box Test on Standardized Squared Residuals
## ------------------------------------
## statistic p-value
## Lag[1] 1.066 0.3019
## Lag[2*(p+q)+(p+q)-1][35] 11.645 0.9173
## Lag[4*(p+q)+(p+q)-1][59] 19.086 0.9722
## d.o.f=12
##
## Weighted ARCH LM Tests
## ------------------------------------
## Statistic Shape Scale P-Value
## ARCH Lag[13] 0.5748 0.500 2.000 0.4484
## ARCH Lag[15] 5.0299 1.495 1.873 0.1456
## ARCH Lag[17] 6.3139 2.478 1.780 0.2098
##
## Nyblom stability test
## ------------------------------------
## Joint Statistic: 19.6131
## Individual Statistics:
## mu 0.25563
## ar1 0.09448
## ar2 0.04504
## ma1 0.14111
## ma2 0.01832
## omega 0.15468
## alpha1 0.03218
## alpha2 0.16393
## alpha3 0.08950
## alpha4 0.27793
## alpha5 0.05851
## alpha6 0.75891
## beta1 0.24558
## beta2 0.36390
## beta3 0.29345
## beta4 0.11861
## beta5 0.08248
## beta6 0.30169
##
## Asymptotic Critical Values (10% 5% 1%)
## Joint Statistic: 3.83 4.14 4.73
## Individual Statistic: 0.35 0.47 0.75
##
## Sign Bias Test
## ------------------------------------
## t-value prob sig
## Sign Bias 1.3653 0.1723
## Negative Sign Bias 0.1059 0.9157
## Positive Sign Bias 0.2105 0.8333
## Joint Effect 2.9116 0.4055
##
##
## Adjusted Pearson Goodness-of-Fit Test:
## ------------------------------------
## group statistic p-value(g-1)
## 1 20 342.0 4.525e-61
## 2 30 365.3 7.602e-60
## 3 40 391.0 1.187e-59
## 4 50 415.0 1.940e-59
##
##
## Elapsed time : 0.9933701
Under the robust standard errors, the ARMA terms (ar1, ar2, ma1, ma2), alpha1 and beta5 remain significant, whereas alpha6, beta6 and most of the remaining GARCH coefficients are not.
Fig 20.: Residual check plots
Above (Fig 20.), the standardised residuals of this model also look approximately normal, and their limits are somewhat thinner on the left end than for the previous model. The QQ plot still shows very thick tails. The ACF plot of the standardised residuals retains a few significant lags, while the squared standardised residuals show only one.
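To put a number on the remaining autocorrelation visible in the ACF panels, the standardised residuals can be pulled from the fit and tested with a Ljung-Box test; a small sketch, assuming rugarch's residuals() accessor with standardize = TRUE:
fit_2166 = arma_garch_model_list[["21266"]]@model_fit
z = as.numeric(residuals(fit_2166, standardize = TRUE))   # standardised residuals
Box.test(z, lag = 10, type = "Ljung-Box")                  # serial correlation in the levels
Box.test(z^2, lag = 10, type = "Ljung-Box")                # any ARCH effect left in the squares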
Overall, since our focus is on minimising the forecast error (MASE), we will proceed with the ARMA(2,2) + GARCH(2,5) model.
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
#arima_key <- "ARIMA(2 , 1 , 1) - CSS"
garch_key <- "21225"
origSeries = ts(bitCoin_csv[,2], start = as.Date("04/27/13", format = "%m/%d/%y"))
fittedVals = ts(as.numeric(as.vector(arma_garch_model_list[[garch_key]]@fitted_val)),
start=c(2013, 117), frequency = 365)
# fittedVals = ts(arma_garch_model_list[[garch_key]]@fitted_val, start = as.Date("04/27/13", format = "%m/%d/%y"))
forcVals = ts(as.numeric(as.vector(arma_garch_model_list[[garch_key]]@forc_val)),
start = c(2019, 56), frequency = 365)
# forcVals = ts(arma_garch_model_list[[garch_key]]@forc_val,start = as.Date("03/04/18", format = "%m/%d/%y"))
plot(bitCoin_ts,
type="l",
lwd = 2,
col = c("#003366"),
main = "BitCoin Closing Price Series",
ylab = "Closing Price in $",
xlab = "Year")
lines(fittedVals, col = c("blue"), lwd = 2)
lines(forcVals, col = c("green"), lwd = 2)
legend("topleft",lty=1, bty = "n" ,text.width = 15, col=c("#003366","blue", "green"),
c("Original Series", "Fitted ARMA + GARCH(2,2)(2,5) Model", "Forecast Series"))
grid()

Fig 21.: Fitted Curves to the BitCoin Closing Price Series
The model with the best MASE is ARMA(2,2) + GARCH(2,5). The table below shows its forecast MASE and its fitted MASE; a sketch of how MASE is computed follows the table.
| Measure | Best MASE |
|---|---|
| Forecast MASE (10-day test set) | 0.9264105 |
| Fitted MASE (full series) | 20.19503 |
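As a reminder of how these values are computed, MASE scales the mean absolute error by the mean absolute one-step naive error, so a value below 1 means the forecasts beat the naive forecast on average. The figures above come from the sourced MASE() helper (MASE.R, not reproduced here); the function below is only an illustrative stand-in following the usual definition and may differ from that helper in detail:
mase_sketch = function(actual, predicted) {   # hypothetical helper, not the sourced MASE.R
  scale = mean(abs(diff(actual)))             # mean absolute naive one-step error
  mean(abs(actual - predicted)) / scale
}
# e.g. forecast MASE against the 10-day hold-out:
# mase_sketch(testSet, arma_garch_model_list[["21225"]]@forc_val[2:11])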
#rm(list = ls())
library(easypackages)
checkForPack = c("knitr","car","jsonlite","lattice","stringr","sp","tidyr","forecast", "dplyr",
"readr","expsmooth","TSA","dynlm","Hmisc","car","AER","tseries","lmtest",
"FitAR","fUnitRoots","FSAdata", "kableExtra", "truncnorm", "rugarch", "lubridate")
checkNInst = checkForPack %in% rownames(installed.packages())
if ( any(!checkNInst) ) { install.packages( checkForPack[!checkNInst] ) }
libraries(checkForPack)
source('C:/Arjun/RMIT/7. Time Series/Project/ts-bitcoin/residual.analysis.R')
source('C:/Arjun/RMIT/7. Time Series/Project/ts-bitcoin/sort.score.R')
source('C:/Arjun/RMIT/7. Time Series/Project/ts-bitcoin/MASE.R')
load("C:\\Arjun\\RMIT\\7. Time Series\\Project\\ts-bitcoin\\Report_10_06_19.RData")
#source('C:/Arjun/RMIT/7. Time Series/Functions/residual.analysis.R')
set.seed(738)
forecasting_period = 10
testSet = c(3882.7, 3854.36, 3851.05, 3854.79, 3859.58, 3864.42, 3847.18, 3761.56, 3896.38, 3903.94)
bitCoin_csv = read.csv("bitCoin_Historical_Price.csv")
bitCoin_csv$Close = as.numeric(as.character(gsub(",","",bitCoin_csv$Close))) # Will convert factor to a numeric value by stripping the commas to avoid NA's
bitCoin_ts = ts(as.numeric(as.vector(bitCoin_csv$Close)), start=c(2013, 117), frequency = 365)
any(!is.finite(bitCoin_ts)) #checking for NA's
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
plot(bitCoin_ts,
#axes = FALSE,
ylab='Closing Price in $',
xlab='Year',
type='l',
col = c("#003366"),
lwd=2,
main = "Time Series Plot of BitCoin Closing Prices (in $)")
legend("topleft",lty=1, bty = "n" ,text.width = 15, col=c("#003366"),lwd=1.5,
c("Closing Price in $"))
grid()
bitCoin_split = bitCoin_csv
bitCoin_split$Date <- as.Date(bitCoin_split$Date, format = "%d/%m/%y")
class(bitCoin_split$Date)
bitCoin_split_17 = bitCoin_split[bitCoin_split$Date >= as.Date("2017-04-01"),]
bitcoints = ts(bitCoin_split_17$Close,
start = c(2017, as.numeric(format(as.Date("2017-04-01"), "%j"))),
frequency = 365)
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
plot(bitcoints,
# axes = FALSE,
ylab='Closing Price in $',
xlab='Year',
type='l',
col = c("#003366"),
lwd=2,
main = "Time Series Plot of BitCoin Closing Prices after 2017 split (in $)")
legend("topleft",lty=1, bty = "n" ,text.width = 15, col=c("#003366"),lwd=2,
c("Closing Price in $ after split"))
grid()
par(mar=c(5,4,4.5,1),mfrow=c(1,2),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
acf(bitCoin_ts, main = "ACF plot for BitCoin Prices")
pacf(bitCoin_ts, main = "PACF plot for BitCoin Prices")
par(mar=c(5,4,4.5,1),mfrow=c(1,2),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
acf(bitcoints, main = "ACF plot for BitCoin Prices post split")
pacf(bitcoints, main = "PACF plot for BitCoin Prices post split")
#adf_test_model <- adf.test(bitcoin_ts)
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
y = bitCoin_ts
x = zlag(bitCoin_ts)
index = 2:length(x)
cor = cor(y[index],x[index])
plot(y=bitCoin_ts,
x=zlag(bitCoin_ts),
ylab='Closing Price in $ after split',
xlab='Previous Year Closing Price in $',
main = "Relationship between successive data points",
col = c("#003366"))
legend("topleft",lty=3, bty = "n" ,text.width = 15, col=c("#003366"),lwd=4,
c(paste0("Correlation: ", round(cor,4))))
grid()
#lmModel = lm(bitCoin_ts~ time(bitCoin_ts))
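# lmModel is not re-fitted here; it is restored from the Report_10_06_19.RData workspace loaded above, which is why the lm() call stays commented out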
summary(lmModel)
par(mar=c(5,4,4,3),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
plot(bitCoin_ts,
ylab='Closing Price in $ after split',
xlab='Year',
type='l',
col = c("#003366"),
lwd = 2,
main = "Fitted Linear Curve to the BitCoin Closing Price Series")
#lines(as.vector(fitted(lmModel)), col="black", lwd = 1)
abline(lmModel, col = "red", lwd = 2)
legend("topleft",lty=1, text.width = 20, col=c("#003366","red"), bty = "n",
c("BitCoin Series", "Fitted Linear Curve"))
residual.analysis(lmModel)
testAdf = adf.test(bitCoin_ts)
testAdf
lambda = 0
bitCoin.log = log(bitCoin_ts)
round(min(bitCoin_ts),2) #checking for any negative values before
bitCoin.BoxCox = BoxCox.ar(bitCoin_ts, seq(-1,1,0.01), method = "ols")
lambda <- (as.numeric(bitCoin.BoxCox$ci[1]) + as.numeric(bitCoin.BoxCox$ci[2])) / 2
lambda
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
plot(bitCoin.log,
# axes = FALSE,
ylab='Closing Price in $',
xlab='Year',
type='l',
col = c("#003366"),
lwd=2,
main = "Time Series Plot of Transformed BitCoin Closing Prices (in $)")
legend("topleft",lty=1, bty = "n" ,text.width = 15, col=c("#003366"),lwd=2,
c("log transformed series"))
par(mar=c(5,4,4.5,1),mfrow=c(1,2),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
acf(bitCoin.log, main = "ACF plot for tranformed BitCoin Prices")
pacf(bitCoin.log, main = "PACF plot for transformed BitCoin Prices")
testAdf = adf.test(bitCoin.log)
testAdf
diff.bitCoin.log = diff(bitCoin.log, differences = 1)
testAdf = adf.test(diff.bitCoin.log)
testAdf
diff.bitCoin = diff(bitCoin_ts, differences = 1)
ar(diff(diff.bitCoin))
testAdf = adfTest(diff.bitCoin, lags = 33)
testAdf
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
plot(diff.bitCoin.log,
# axes = FALSE,
ylab='Closing Price in $',
xlab='Year',
type='l',
col = c("#003366"),
lwd=1,
main = "Time Series Plot of Transformed BitCoin Closing Prices (in $)")
legend("topleft",lty=1, bty = "n" ,text.width = 15, col=c("#003366"),lwd=1,
c("Log-tranformed Differenced Series"))
par(mar=c(5,4,4.5,1),mfrow=c(1,2),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
acf(diff.bitCoin.log, main = "ACF plot for differenced series")
pacf(diff.bitCoin.log, main = "PACF plot for differenced series")
a = eacf(diff.bitCoin.log)
print(paste0("AR Max is: ", a$ar.max))
print(paste0("MA Max is: ", a$ma.ma))
par(mfrow=c(1,1))
res <- armasubsets(y=diff.bitCoin.log,nar=11,nma=11,y.name='test',ar.method='ols')
plot(res)
.GlobalEnv$.__C__Arima
.GlobalEnv$.__C__ARIMA
.GlobalEnv$.__C__forecast
#setOldClass(c("Arima", "ARIMA", "forecast"))
# flag = FALSE
forecasting_period = 10
# if (flag == TRUE) max_order = 2 else {
# max_order = 6
# smaller_max_order = 3
# }
arma_garch_model_list = list()
arima_model_list <- list()
#
# first_arima_model_list = list()
# last_arima_model_list = list()
# first_arma_garch_model_list = list()
# last_arma_garch_model_list = list()
setClass(Class="ArimaClass",
representation(
key="character",
model_name="character",
ar="numeric",
ma="numeric",
d="numeric",
est_method="character",
aic="numeric",
bic="numeric",
fitted_mase="numeric",
forc_mase="numeric",
fitted_val="numeric",
forc_val="numeric",
model_forc="forecast",
model_fit="ARIMA",
model="Arima"
)
)
setClass(Class="ARMA_GARCH_Models",
representation(
key="character",
model_name="character",
mean_flag="logical",
ar="numeric",
ma="numeric",
alpha_order="numeric",
beta_order="numeric",
d="numeric",
est_method = "character",
aic="numeric",
bic="numeric",
fitted_mase="numeric",
forc_mase="numeric",
fitted_val="numeric",
forc_val = "numeric",
model_forc="uGARCHforecast",
model_fit= "uGARCHfit"
)
)
generateARIMAModels<- function(tsobject, ar,d,ma, ts_arima_model_list, l, testSet,
call_code) {
m = "ML"
k <- paste('ARIMA(',toString(ar),' , ',toString(d),' , ', toString(ma), ')',' - ',m,sep='')
print(paste("Processing :",k,sep=""))
if (call_code == "Modelling" ) {
model <- arima(tsobject,order=c(ar,d,ma),method=m)
model_fit = Arima(bitCoin_ts, model=model, lambda=l)
} else {
model <- arima(log(tsobject),order=c(ar,d,ma),method=m)
model_fit = Arima(tsobject, model=model, lambda=l)
}
aic <- AIC(model)
bic <- AIC(model, k= log(length(tsobject)))
model_forc = forecast(model_fit,h=forecasting_period)
fitted_val = as.numeric(model_fit$fitted)
forc_val = as.numeric(model_forc$mean)
forc_mase = MASE(testSet, forc_val)[[1]][,1]
if (call_code == "Modelling" ) {
fitted_mase = MASE(as.numeric(bitCoin_ts), fitted_val)[[1]][,1]
} else {
fitted_mase = MASE(as.numeric(tsobject), fitted_val)[[1]][,1]
}
ts_arima_model_list[[k]] <- new("ArimaClass",
key=k,
ar=ar,
ma=ma,
d=d,
est_method=m,
aic=aic,
bic=bic,
fitted_mase = fitted_mase,
forc_mase = forc_mase,
model_name="ARIMA",
fitted_val = fitted_val,
forc_val = forc_val,
model_forc=model_forc,
model_fit=model_fit,
model = model)
m = "CSS"
k <- paste('ARIMA(',toString(ar),' , ',toString(d),' , ', toString(ma), ')',' - ',m,sep='')
print(paste("Processing :",k,sep=""))
if (call_code == "Modelling" ) {
model <- arima(tsobject,order=c(ar,d,ma),method=m)
model_fit = Arima(bitCoin_ts, model=model, lambda=l)
} else {
model <- arima(log(tsobject),order=c(ar,d,ma),method=m)
model_fit = Arima(tsobject, model=model, lambda=l)
}
model_forc = forecast(model_fit,h=forecasting_period)
fitted_val = as.numeric(model_fit$fitted)
forc_val = as.numeric(model_forc$mean)
forc_mase = MASE(testSet, forc_val)[[1]][,1]
if (call_code == "Modelling" ) {
fitted_mase = MASE(as.numeric(bitCoin_ts), fitted_val)[[1]][,1]
} else {
fitted_mase = MASE(as.numeric(tsobject), fitted_val)[[1]][,1]
}
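# Note: aic and bic below carry over the values computed for the ML fit above; they are not recomputed for the CSS fit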
ts_arima_model_list[[k]] <- new("ArimaClass",
key=k,
ar=ar,
ma=ma,
d=d,
est_method=m,
aic=aic,
bic=bic,
fitted_mase = fitted_mase,
forc_mase = forc_mase,
model_name="ARIMA",
fitted_val = fitted_val,
forc_val = forc_val,
model_forc=model_forc,
model_fit=model_fit,
model = model)
rm(m,k,aic,bic,model)
return (ts_arima_model_list)
}
arima_model_list = list()
for (nar in 1:8) {
for (nma in 1:8) {
arima_model_list <- generateARIMAModels(diff.bitCoin.log,
ar=nar, d=1, ma=nma,
arima_model_list, lambda,testSet,"Modelling")
}
}
results_df = data.frame(model_name=character(),
ar_order=numeric(), d_order=numeric(),ma_order=numeric(),
alpha_order = numeric(), beta_order = numeric(),
est_method = character(), mase_forecast = numeric(), mase_fitted = numeric(), aic = numeric(),
bic = numeric())
names_vec = colnames(results_df)
names(results_df) <- names_vec
len <- length(arima_model_list)
for(il in 1:len) {
index <- (gregexpr(pattern ='\\(', arima_model_list[[il]]@key)[[1]][1]) -1
temp <- data.frame(substring(text = arima_model_list[[il]]@key,1, index),
arima_model_list[[il]]@ar,
arima_model_list[[il]]@d,
arima_model_list[[il]]@ma,
NA,
NA,
arima_model_list[[il]]@est_method,
arima_model_list[[il]]@forc_mase,
arima_model_list[[il]]@fitted_mase,
arima_model_list[[il]]@aic,
arima_model_list[[il]]@bic)
names(temp)<-names_vec
results_df <- rbind(results_df,temp)
}
newDf <- arrange(results_df, mase_forecast, mase_fitted)
newDf <- subset(newDf, select = c(-alpha_order, -beta_order))
kable(newDf[1:5,], caption = "Best MASE from the ARIMA models - Top 5") %>%
kable_styling("striped", full_width = F, font_size = 7, position = "center") %>%
column_spec(c(6,7), bold = T, color = "white", background = "#ed9121") #"#FF4040")
print("Results for ARIMA(1,1,1)")
model_111_css = arima(diff.bitCoin.log,order=c(1,1,1),method='CSS')
coeftest(model_111_css)
print("Results for ARIMA(5,1,2)")
model_512_css = arima(diff.bitCoin.log,order=c(5,1,2),method='CSS')
coeftest(model_512_css)
print("Results for ARIMA(2,1,8)")
model_218_css = arima(diff.bitCoin.log,order=c(2,1,8),method='CSS')
coeftest(model_218_css)
print("Results for ARIMA(2,1,1)")
model_211_css = arima(diff.bitCoin.log,order=c(2,1,1),method='CSS')
coeftest(model_211_css)
print("Results for ARIMA(2,1,3)")
model_213_css = arima(diff.bitCoin.log,order=c(2,1,3),method='CSS')
coeftest(model_213_css)
print("Results for ARIMA(2,1,2)")
model_212_css = arima(diff.bitCoin.log,order=c(2,1,2),method='CSS')
coeftest(model_212_css)
print("Results for ARIMA(3,1,1)")
model_311_css = arima(diff.bitCoin.log,order=c(3,1,1),method='CSS')
coeftest(model_311_css)
kable(newDf[ c(4,10),], caption = "Comparing MASE of ARIMA(2,1,1) and ARIMA(2,1,2)") %>%
kable_styling("striped", full_width = F, font_size = 7, position = "left") %>%
column_spec(c(7,8), bold = T, color = "white", background = "#ed9121")
residual.analysis(model_211_css)
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
arima_key <- "ARIMA(2 , 1 , 1) - CSS"
plot(bitCoin_ts,
type="l",
lwd = 2,
col = c("#003366"),
main = "BitCoin Closing Price Series",
ylab = "Closing Price in $",
xlab = "Year")
abline(lmModel, col = "red", lwd = 2)
lines(arima_model_list[[arima_key]]@model_fit$fitted, col= c("green"), lwd = 2)
legend("topleft",lty=1, bty = "n" ,text.width = 15, col=c("#003366","red", "green"),
c("Original Series", "Fitted Linear Model", "Fitted ARIMA(2,1,1) Model"))
grid()
forecasting_period = 10
fit = Arima(bitcoints, model = model_212_css, lambda=0.5)
bitCoinDataForecast = forecast(fit,h=forecasting_period)
knitr::kable(bitCoinDataForecast,
caption = "10-Step ahead Forecast for Closing Prices of Bitcoin Series") %>%
kable_styling("striped", full_width = F, font_size = 12) %>%
column_spec(2, bold = T, color = "white", background = "#ed9121")
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1)
plot(bitCoinDataForecast,
type="l",
lwd = 2,
col = c("#003366"),
main = "BitCoin Closing Price Series",
ylab = "Closing Price in $",
xlab = "Year")
# lines(fitted_series,col = c("red"), type="s")
# lines(forecast_series,col = c("green"), type="o")
legend("topleft",lty=1, bty = "n" ,text.width = 15, col=c("#003366", "blue"),
c("Bit coin closing price (in $)", "10 step ahead Forecast"))
grid()
max_order = 6
arma_garch_modelling = function(ts, return_series, log_series, validation_set, model_name, garch_alpha_order, garch_beta_order, ar_order, ma_order, mean_flag, dist_model, arma_garch_model_list) {
garch_order = c(garch_alpha_order,garch_beta_order)
arma_order = c(ar_order,ma_order)
model_spec = ugarchspec(variance.model = list(model = model_name , garchOrder = garch_order),
mean.model = list(armaOrder = arma_order, include.mean = mean_flag),
distribution.model = dist_model)
model_fit = ugarchfit(spec=model_spec,data=return_series)
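# Back-transform to the price scale: diffinv() integrates the fitted one-step values back onto the log scale (seeded with the first log price) and exp() undoes the log transform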
fitted <- exp(diffinv(as.vector(fitted(model_fit)), differences = 1, xi = as.vector(log_series[1])))
fitted_mase <- MASE(as.vector(ts), as.vector(fitted))[[1]][,1]
forecast_model <- ugarchforecast(model_fit, data = return_series, n.ahead = forecasting_period)
forecast_fitted <- exp(diffinv(as.vector(forecast_model@forecast$seriesFor),
differences = 1, xi = log_series[length(log_series)]))
forecast_mase <- MASE(as.vector(testSet), as.vector(forecast_fitted[2:11]))[[1]][,1]
aic = infocriteria(model_fit)[1,1]
bic = infocriteria(model_fit)[2,1]
if (mean_flag == TRUE) d_order = 1 else d_order = 0
key = paste(ar_order,d_order,ma_order,garch_alpha_order,garch_beta_order,sep = "")
arma_garch_model_list[[key]] = new("ARMA_GARCH_Models",
key=key,
model_name = model_name,
mean_flag = mean_flag,
ar= ar_order,
d = d_order,
ma= ma_order,
alpha_order = garch_alpha_order,
beta_order = garch_beta_order,
est_method = dist_model,
fitted_mase=fitted_mase,
forc_mase=forecast_mase,
aic=aic,
bic=bic,
fitted_val=fitted,
forc_val=forecast_fitted,
model_fit=model_fit,
model_forc=forecast_model)
rm(garch_order,arma_order, model_spec, model_fit, fitted, fitted_mase, forecast_model,forecast_fitted,forecast_mase, aic, bic, key, d_order)
return (arma_garch_model_list)
}
model_211_css.r = model_211_css$residuals
abs.res = abs(model_211_css.r)
sq.res = model_211_css.r^2
par(mar=c(5,4,4.5,1),mfrow=c(1,2),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
acf(abs.res, ci.type = "ma", main = "ACF plot for absolute residual series")
pacf(abs.res, main = "PACF plot for absolute residual series")
eacf(abs.res)
par(mar=c(5,4,4.5,1),mfrow=c(1,2),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
acf(sq.res, ci.type = "ma", main = "ACF plot for squared residual series")
pacf(sq.res, main = "PACF plot for squared residual series")
eacf(sq.res)
ar_order_seq = seq(1:2)
ma_order_seq = seq(1:2)
garch_alpha_order_seq = seq (1:max_order)
garch_beta_order_seq = seq ( 1:max_order)
mean_flag_seq = c(TRUE,FALSE)
distribution = c("norm", "std")
for(ar_order in ar_order_seq) {
for (ma_order in ma_order_seq) {
for (garch_alpha_order in garch_alpha_order_seq) {
for (garch_beta_order in garch_beta_order_seq) {
for (mean_flag in mean_flag_seq){
model_name = "sGARCH"
dist_model = distribution
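# Note: dist_model is assigned the full two-element distribution vector here, so est_method stores both labels for a single fit; this appears to be why the results table shows paired norm/std rows with identical statistics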
print(paste("ar_order :",ar_order,"ma_order :",ma_order,
"garch_alpha_order:",garch_alpha_order,
"garch_beta_order:", garch_beta_order,
sep=" "))
# try catch start
tryCatch({
# TO-DO: method selection based on original or return series based on best MASE
arma_garch_model_list = arma_garch_modelling(
bitCoin_ts,
diff.bitCoin.log,
bitCoin.log,
testSet, model_name,
garch_alpha_order,garch_beta_order,
ar_order, ma_order,mean_flag,
dist_model, arma_garch_model_list)
},error=function(e){
print(paste(e," for the model ", "ar_order :",ar_order,"ma_order :",ma_order,
"garch_alpha_order:",garch_alpha_order,
"garch_beta_order:", garch_beta_order,
sep=" "))
})
# try catch end
} # end of mean flag seq
}# end of garch_beta_order seq
}# end of garch_alpha_order seq
}# end of ma_order_seq seq
}# end of ar_order_seq seq
results_df_new = data.frame(model_name=character(),
ar_order=numeric(), d_order=numeric(),ma_order=numeric(),
alpha_order = numeric(), beta_order = numeric(),
est_method = character(), mase_forecast = numeric(), mase_fitted = numeric(), aic = numeric(),
bic = numeric())
names_vec = colnames(results_df_new)
names(results_df_new) <- names_vec
len <- length(arma_garch_model_list)
for (i in 1:length(arma_garch_model_list)){
model_item = arma_garch_model_list[[i]]
results_df_new =
rbind(results_df_new,
data.frame(model_name = model_item@model_name,
ar_order = model_item@ar,
d_order = model_item@d,
ma_order = model_item@ma,
alpha_order = model_item@alpha_order,
beta_order = model_item@beta_order,
est_method= model_item@est_method,
mase_forecast= model_item@forc_mase,
mase_fitted = model_item@fitted_mase,
aic = model_item@aic,
bic = model_item@bic))
}
results_df_new <- arrange(results_df_new, mase_forecast, mase_fitted)
kable(results_df_new[1:5,], caption = "Best MASE from the ARMA GARCH models") %>%
kable_styling("striped", full_width = F, font_size = 7, position = "center") %>%
column_spec(c(8,9), bold = T, color = "white", background = "#ed9121") #"#FF4040")
garch_key = "21225"
arma_garch_model_list[[garch_key]]@model_fit
plot(arma_garch_model_list[[garch_key]]@model_fit)
garch_key = "21266"
arma_garch_model_list[[garch_key]]@model_fit
plot(arma_garch_model_list[[garch_key]]@model_fit)
garch_key = "11525"
arma_garch_model_list[[garch_key]]@model_fit
plot(arma_garch_model_list[[garch_key]]@model_fit)
par(mar=c(5,4,4.5,1),cex.main=1, cex.lab=1, cex.axis=1) #c(bottom, left, top, right)
#arima_key <- "ARIMA(2 , 1 , 1) - CSS"
garch_key <- "21225"
origSeries = ts(bitCoin_csv[,2], start = as.Date("04/27/13", format = "%m/%d/%y"))
fittedVals = ts(as.numeric(as.vector(arma_garch_model_list[[garch_key]]@fitted_val)),
start=c(2013, 117), frequency = 365)
# fittedVals = ts(arma_garch_model_list[[garch_key]]@fitted_val, start = as.Date("04/27/13", format = "%m/%d/%y"))
forcVals = ts(as.numeric(as.vector(arma_garch_model_list[[garch_key]]@forc_val)),
start = c(2019, 56), frequency = 365)
# forcVals = ts(arma_garch_model_list[[garch_key]]@forc_val,start = as.Date("03/04/18", format = "%m/%d/%y"))
plot(bitCoin_ts,
type="l",
lwd = 2,
col = c("#003366"),
main = "BitCoin Closing Price Series",
ylab = "Closing Price in $",
xlab = "Year")
lines(fittedVals, col = c("blue"), lwd = 2)
lines(forcVals, col = c("green"), lwd = 2)
legend("topleft",lty=1, bty = "n" ,text.width = 15, col=c("#003366","blue", "green"),
c("Original Series", "Fitted ARMA + GARCH(2,2)(2,5) Model", "Forecast Series"))
grid()save.image("C:\\Arjun\\RMIT\\7. Time Series\\Project\\ts-bitcoin\\Report_10_06_19.RData")
print("Done")## [1] "Done"