ARIMAX DC (Group 3)

Pandu Henanda Saputra (G1401201043)

2023-04-09

PACKAGES

library(rmdformats)
library(dplyr)
## 
## Attaching package: 'dplyr'
## The following objects are masked from 'package:stats':
## 
##     filter, lag
## The following objects are masked from 'package:base':
## 
##     intersect, setdiff, setequal, union
library(readxl)
library(forecast)
## Registered S3 method overwritten by 'quantmod':
##   method            from
##   as.zoo.data.frame zoo
library(TTR)
library(smooth)
## Loading required package: greybox
## Package "greybox", v1.0.8 loaded.
## This is package "smooth", v3.2.0
## 
## Attaching package: 'smooth'
## The following object is masked from 'package:TTR':
## 
##     lags
library(Mcomp)
library(knitr)
library(tseries)
library(readr)
library(TSA)
## Registered S3 methods overwritten by 'TSA':
##   method       from    
##   fitted.Arima forecast
##   plot.Arima   forecast
## 
## Attaching package: 'TSA'
## The following object is masked from 'package:readr':
## 
##     spec
## The following objects are masked from 'package:stats':
## 
##     acf, arima
## The following object is masked from 'package:utils':
## 
##     tar
library(ggplot2)
library(MLmetrics)
## 
## Attaching package: 'MLmetrics'
## The following objects are masked from 'package:greybox':
## 
##     MAE, MAPE, MSE
## The following object is masked from 'package:base':
## 
##     Recall
library(cowplot)
library(gridExtra)
## 
## Attaching package: 'gridExtra'
## The following object is masked from 'package:dplyr':
## 
##     combine
library(gtable)
library(MASS)
## 
## Attaching package: 'MASS'
## The following object is masked from 'package:dplyr':
## 
##     select
library(lmtest)
## Loading required package: zoo
## 
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
## 
##     as.Date, as.Date.numeric
library(GGally)
## Registered S3 method overwritten by 'GGally':
##   method from   
##   +.gg   ggplot2
library(lubridate)
## 
## Attaching package: 'lubridate'
## The following object is masked from 'package:cowplot':
## 
##     stamp
## The following object is masked from 'package:greybox':
## 
##     hm
## The following objects are masked from 'package:base':
## 
##     date, intersect, setdiff, union
library(caret)
## Loading required package: lattice
## Registered S3 method overwritten by 'lava':
##   method     from   
##   print.pcor greybox
## 
## Attaching package: 'caret'
## The following objects are masked from 'package:MLmetrics':
## 
##     MAE, RMSE
## The following object is masked from 'package:greybox':
## 
##     MAE
library(tidyverse)
## ── Attaching core tidyverse packages ──────────────────────── tidyverse 2.0.0 ──
## ✔ forcats 1.0.0     ✔ tibble  3.2.1
## ✔ purrr   1.0.1     ✔ tidyr   1.3.0
## ✔ stringr 1.5.0
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ gridExtra::combine() masks dplyr::combine()
## ✖ dplyr::filter()      masks stats::filter()
## ✖ lubridate::hm()      masks greybox::hm()
## ✖ dplyr::lag()         masks stats::lag()
## ✖ purrr::lift()        masks caret::lift()
## ✖ MASS::select()       masks dplyr::select()
## ✖ TSA::spec()          masks readr::spec()
## ✖ tidyr::spread()      masks greybox::spread()
## ✖ lubridate::stamp()   masks cowplot::stamp()
## ℹ Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors

DATA

FKP APBD Data

# FKTP =  3
apbd <- read_excel("C:/Users/sistiawan/Downloads/Pandu/FKP12=3_mingguanDC.xlsx")
# Construct a date column from the year and week number
data <- apbd %>%
  mutate(date = make_date(tahun, 1, 1) + weeks(minggu - 1))

# Convert the week (minggu) column to a factor
data$minggu <- as.factor(data$minggu)
                         
# Remove the rows for weeks that do not contain a full 7 days
data <- data[-c(53, 106,159), ]

Data for Variable X (Holidays)

# Add a dummy variable for holiday weeks
# Holiday weeks are entered manually for each year
libur_2019 <- c(1, 6, 10, 14, 16, 18, 20, 22, 23, 32, 33, 35, 45, 52)
libur_2020 <- c(1, 8, 12, 13, 15, 18, 19, 21, 22, 31, 33, 34, 44, 52, 53)
libur_2021 <- c(1, 7, 10, 11, 14, 18, 19, 20, 21, 22, 29, 32, 33, 42, 52)

data$hari_libur <- as.numeric(
  (data$tahun == 2019 & data$minggu %in% libur_2019) |
    (data$tahun == 2020 & data$minggu %in% libur_2020) |
    (data$tahun == 2021 & data$minggu %in% libur_2021)
)
# Log-transform the total arrivals series
data1 <- log(data$total_kedatangan)
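
As a quick sanity check (not part of the original code), the number of flagged holiday weeks per year can be tabulated:

# Count how many weeks per year are flagged as holiday weeks
table(data$tahun, data$hari_libur)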

Data Exploration

Plot Time Series

# Convert the data into a weekly time series object
data.ts<-ts(data1, frequency = 52, start = c(2019,1), end = c(2021,52))

# Box-Cox
#lambda <- BoxCox.lambda(data.ts)
# data.ts <- BoxCox(data.ts, lambda)

# Plot the full series
plot(data.ts,xlab ="Tahun", ylab = "Jumlah Kedatangan", col="black", main = "Plot Deret Waktu Data")
points(data.ts)

Splitting Data

data.train <- ts(data.ts[1:124])
data.test <- ts(data.ts[125:156])

#Time Series Data
training.ts<-ts(data.train,frequency=52, start = c(2019,1), end = c(2021,20))

testing.ts<-ts(data.test,frequency=52, start = c(2021,21), end = c(2021,52))

Plot Splitting Data

plot(training.ts, xlab ="Periode", ylab = "Jumlah Kedatangan", col="red", main = "Plot Data Training")
points(training.ts)

plot(testing.ts, xlab ="Periode", ylab = "Jumlah Kedatangan", col="red", main = "Plot Data Testing")
points(testing.ts)

Plot of Training and Testing Data, FKTP 12.3

ts.plot(data.ts, xlab = "Periode", ylab ="Jumlah Kedatangan", 
        main = "Plot Deret Waktu Data Jumlah kedatangan FKTP APBD")
lines(training.ts, col = "blue")
lines(testing.ts, col="Red")
legend("bottomleft",c("Data Training","Data Testing"), 
       lty=1, col=c("blue","red"), cex=0.8)
abline(v=2021.38, col=c("black"), lty=1, lwd=1)

Plot ACF

acf(data.train, lag.max = 24, main = "Plot ACF")

Plot PACF

pacf(data.train, lag.max = 24, main = "Plot PACF")

adf.test(data.train)
## 
##  Augmented Dickey-Fuller Test
## 
## data:  data.train
## Dickey-Fuller = -2.7966, Lag order = 4, p-value = 0.2455
## alternative hypothesis: stationary
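
The ADF test fails to reject the unit-root null (p-value 0.2455), so the training series is treated as non-stationary and differenced once below. As an optional cross-check (not part of the original workflow), forecast::ndiffs estimates the number of regular differences suggested for stationarity; a minimal sketch:

# Estimate how many regular differences are suggested for stationarity
ndiffs(data.train)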

Plot Differencing

data.dif1 <- diff(data.train, differences = 1) 
plot.ts(ts(data.dif1, frequency = 52, start = c(2019, 2)), lty = 1, xlab = "Periode", ylab = "Jumlah Kedatangan Pembedaan 1", main = "Plot Differencing Data Jumlah Kedatangan")

adf.test(data.dif1)
## Warning in adf.test(data.dif1): p-value smaller than printed p-value
## 
##  Augmented Dickey-Fuller Test
## 
## data:  data.dif1
## Dickey-Fuller = -5.8605, Lag order = 4, p-value = 0.01
## alternative hypothesis: stationary

Plot ACF Differencing

acf(data.dif1, lag.max = 36, main = "Plot ACF Setelah Differencing satu kali")

Plot PACF Differencing

pacf(data.dif1, lag.max = 48, main = "Plot PACF Setelah Differencing satu kali")

Plot EACF Differencing

eacf(data.dif1)
## AR/MA
##   0 1 2 3 4 5 6 7 8 9 10 11 12 13
## 0 x x o o o o o o o o x  o  x  o 
## 1 x o o o o o o o o o x  o  x  o 
## 2 x x x o o o o o o o o  o  o  o 
## 3 o o o o o o o o o o o  o  o  o 
## 4 x o o x o o o o o o o  o  o  o 
## 5 x o o x x o o o o o o  o  o  o 
## 6 o o o x o o o o o o o  o  o  o 
## 7 x x x x o o o o o o o  o  o  o

From the ACF, PACF, and EACF of the differenced series, the tentative candidates are ARIMA(0,1,2), ARIMA(1,1,2), ARIMA(2,1,3), ARIMA(3,1,0), ARIMA(3,1,2), and ARIMA(3,1,3).

Tentative Candidate Models

model1 <- Arima(data.dif1, order=c(0,0,2), method="ML") 
model2 <- Arima(data.dif1, order=c(1,0,2), method="ML")   
model3 <- Arima(data.dif1, order=c(2,0,3), method="ML")  
model4 <- Arima(data.dif1, order=c(3,0,0), method="ML")
model5 <- Arima(data.dif1, order=c(3,0,2), method="ML")
model6 <- Arima(data.dif1, order=c(3,0,3), method="ML")
summary(model1)
## Series: data.dif1 
## ARIMA(0,0,2) with non-zero mean 
## 
## Coefficients:
##           ma1      ma2     mean
##       -0.3225  -0.2399  -0.0022
## s.e.   0.0852   0.0833   0.0071
## 
## sigma^2 = 0.03207:  log likelihood = 38.37
## AIC=-68.74   AICc=-68.4   BIC=-57.49
## 
## Training set error measures:
##                        ME      RMSE       MAE      MPE     MAPE      MASE
## Training set 0.0006555485 0.1768973 0.1186453 100.5349 198.6656 0.5709096
##                     ACF1
## Training set 0.008255851
coeftest(model1)
## 
## z test of coefficients:
## 
##             Estimate Std. Error z value  Pr(>|z|)    
## ma1       -0.3225297  0.0851743 -3.7867 0.0001527 ***
## ma2       -0.2398663  0.0832628 -2.8808 0.0039663 ** 
## intercept -0.0021807  0.0070874 -0.3077 0.7583146    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
summary(model2)
## Series: data.dif1 
## ARIMA(1,0,2) with non-zero mean 
## 
## Coefficients:
##          ar1      ma1      ma2     mean
##       0.1103  -0.4225  -0.2055  -0.0022
## s.e.  0.3032   0.2913   0.1322   0.0068
## 
## sigma^2 = 0.03231:  log likelihood = 38.44
## AIC=-66.88   AICc=-66.36   BIC=-52.82
## 
## Training set error measures:
##                        ME      RMSE       MAE      MPE     MAPE      MASE
## Training set 0.0007087918 0.1767907 0.1183458 98.69415 197.2988 0.5694683
##                      ACF1
## Training set 0.0005879269
coeftest(model2)
## 
## z test of coefficients:
## 
##             Estimate Std. Error z value Pr(>|z|)
## ar1        0.1102757  0.3031653  0.3637   0.7160
## ma1       -0.4224822  0.2912586 -1.4505   0.1469
## ma2       -0.2054922  0.1322459 -1.5539   0.1202
## intercept -0.0021762  0.0067842 -0.3208   0.7484
summary(model3)
## Series: data.dif1 
## ARIMA(2,0,3) with non-zero mean 
## 
## Coefficients:
##          ar1      ar2      ma1     ma2      ma3     mean
##       0.1885  -0.9856  -0.5608  1.0545  -0.4355  -0.0025
## s.e.  0.0221   0.0186   0.0943  0.0805   0.1043   0.0089
## 
## sigma^2 = 0.02912:  log likelihood = 43.62
## AIC=-73.25   AICc=-72.27   BIC=-53.56
## 
## Training set error measures:
##                        ME      RMSE       MAE      MPE     MAPE      MASE
## Training set 0.0008016482 0.1664203 0.1113209 117.4819 212.7666 0.5356653
##                   ACF1
## Training set 0.0475635
coeftest(model3)
## 
## z test of coefficients:
## 
##             Estimate Std. Error  z value  Pr(>|z|)    
## ar1        0.1884586  0.0220820   8.5345 < 2.2e-16 ***
## ar2       -0.9856254  0.0186476 -52.8553 < 2.2e-16 ***
## ma1       -0.5607821  0.0943371  -5.9444 2.774e-09 ***
## ma2        1.0544966  0.0805469  13.0917 < 2.2e-16 ***
## ma3       -0.4355478  0.1042990  -4.1760 2.967e-05 ***
## intercept -0.0024790  0.0088924  -0.2788    0.7804    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
summary(model4)
## Series: data.dif1 
## ARIMA(3,0,0) with non-zero mean 
## 
## Coefficients:
##           ar1      ar2      ar3     mean
##       -0.3064  -0.3387  -0.2360  -0.0021
## s.e.   0.0878   0.0865   0.0875   0.0084
## 
## sigma^2 = 0.0315:  log likelihood = 39.97
## AIC=-69.94   AICc=-69.42   BIC=-55.88
## 
## Training set error measures:
##                        ME      RMSE       MAE      MPE     MAPE      MASE
## Training set 0.0003715802 0.1745741 0.1177095 110.4362 218.4903 0.5664066
##                     ACF1
## Training set 0.002265008
coeftest(model4)
## 
## z test of coefficients:
## 
##             Estimate Std. Error z value  Pr(>|z|)    
## ar1       -0.3064488  0.0877711 -3.4915 0.0004804 ***
## ar2       -0.3387336  0.0864566 -3.9180  8.93e-05 ***
## ar3       -0.2360357  0.0874699 -2.6985 0.0069657 ** 
## intercept -0.0021455  0.0084303 -0.2545 0.7991108    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
summary(model5)
## Series: data.dif1 
## ARIMA(3,0,2) with non-zero mean 
## 
## Coefficients:
##           ar1      ar2      ar3     ma1      ma2     mean
##       -0.5841  -0.2608  -0.2840  0.2856  -0.1667  -0.0021
## s.e.   0.4296   0.2860   0.1105  0.4408   0.3617   0.0083
## 
## sigma^2 = 0.032:  log likelihood = 40.04
## AIC=-66.08   AICc=-65.1   BIC=-46.39
## 
## Training set error measures:
##                        ME      RMSE       MAE      MPE     MAPE      MASE
## Training set 0.0003793657 0.1744678 0.1172731 90.43504 217.6547 0.5643067
##                      ACF1
## Training set -0.003505236
coeftest(model5)
## 
## z test of coefficients:
## 
##             Estimate Std. Error z value Pr(>|z|)  
## ar1       -0.5840950  0.4296340 -1.3595  0.17398  
## ar2       -0.2607872  0.2860356 -0.9117  0.36191  
## ar3       -0.2840352  0.1105436 -2.5694  0.01019 *
## ma1        0.2855757  0.4407746  0.6479  0.51705  
## ma2       -0.1666650  0.3616799 -0.4608  0.64494  
## intercept -0.0021392  0.0083345 -0.2567  0.79744  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
summary(model6)
## Series: data.dif1 
## ARIMA(3,0,3) with non-zero mean 
## 
## Coefficients:
##          ar1      ar2      ar3     ma1      ma2     ma3     mean
##       -0.539  -0.2624  -0.3535  0.2424  -0.1426  0.0929  -0.0022
## s.e.   0.355   0.2663   0.2251  0.3647   0.3167  0.2674   0.0088
## 
## sigma^2 = 0.03225:  log likelihood = 40.09
## AIC=-64.18   AICc=-62.92   BIC=-41.68
## 
## Training set error measures:
##                      ME      RMSE       MAE      MPE     MAPE      MASE
## Training set 0.00035259 0.1743931 0.1176408 93.55257 223.6376 0.5660761
##                      ACF1
## Training set -0.004356986
coeftest(model6)
## 
## z test of coefficients:
## 
##             Estimate Std. Error z value Pr(>|z|)
## ar1       -0.5389594  0.3550048 -1.5182   0.1290
## ar2       -0.2623985  0.2662609 -0.9855   0.3244
## ar3       -0.3535207  0.2251346 -1.5703   0.1164
## ma1        0.2424185  0.3646767  0.6647   0.5062
## ma2       -0.1425794  0.3166551 -0.4503   0.6525
## ma3        0.0929124  0.2673808  0.3475   0.7282
## intercept -0.0021576  0.0087611 -0.2463   0.8055

Ranked by AIC, ARIMA(2,1,3) fits best, followed by ARIMA(3,1,0), ARIMA(0,1,2), ARIMA(1,1,2), ARIMA(3,1,2), and ARIMA(3,1,3).

Accuracy of the Tentative Candidate Models

# AIC, BIC, and parameter significance of the ARIMA candidates
modelaccuracy<-data.frame(
  "Model"=c("ARIMA(0,1,2)","ARIMA(1,1,2)", "ARIMA(2,1,3)","ARIMA(3,1,0)","ARIMA(3,1,2)","ARIMA(3,1,3)"),
  "AIC"=c(model1$aic,model2$aic, model3$aic, model4$aic, model5$aic, model6$aic),
  "BIC"=c(model1$bic,model2$bic, model3$bic, model4$bic, model5$bic, model6$bic),
  "Signifikansi"=c("Signifikan","Tidak Signifikan","Signifikan"," Signifikan","Tidak Signifikan","Tidak Signifikan"))

modelaccuracy
##          Model       AIC       BIC     Signifikansi
## 1 ARIMA(0,1,2) -68.74187 -57.49313       Signifikan
## 2 ARIMA(1,1,2) -66.87700 -52.81607 Tidak Signifikan
## 3 ARIMA(2,1,3) -73.24504 -53.55975       Signifikan
## 4 ARIMA(3,1,0) -69.93753 -55.87661       Signifikan
## 5 ARIMA(3,1,2) -66.07717 -46.39188 Tidak Signifikan
## 6 ARIMA(3,1,3) -64.18094 -41.68346 Tidak Signifikan

Based on the AIC values and parameter significance, ARIMA(2,1,3) is selected.
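
As an optional cross-check of this manual order selection (not part of the original workflow), forecast::auto.arima can search over non-seasonal orders on the training series; a minimal sketch:

# Automated ARIMA order search on the training data (stepwise disabled
# for a fuller search); compare the chosen order with ARIMA(2,1,3)
auto.arima(data.train, d = 1, seasonal = FALSE, stepwise = FALSE, approximation = FALSE)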

Model Diagnostics

Overfitting

model3b <- Arima(data.dif1, order=c(3,0,3), method="ML")
model3c <- Arima(data.dif1, order=c(2,0,4), method = "ML")

Comparison of the Model with the Overfitted Models

summary(model3)
## Series: data.dif1 
## ARIMA(2,0,3) with non-zero mean 
## 
## Coefficients:
##          ar1      ar2      ma1     ma2      ma3     mean
##       0.1885  -0.9856  -0.5608  1.0545  -0.4355  -0.0025
## s.e.  0.0221   0.0186   0.0943  0.0805   0.1043   0.0089
## 
## sigma^2 = 0.02912:  log likelihood = 43.62
## AIC=-73.25   AICc=-72.27   BIC=-53.56
## 
## Training set error measures:
##                        ME      RMSE       MAE      MPE     MAPE      MASE
## Training set 0.0008016482 0.1664203 0.1113209 117.4819 212.7666 0.5356653
##                   ACF1
## Training set 0.0475635
coeftest(model3)
## 
## z test of coefficients:
## 
##             Estimate Std. Error  z value  Pr(>|z|)    
## ar1        0.1884586  0.0220820   8.5345 < 2.2e-16 ***
## ar2       -0.9856254  0.0186476 -52.8553 < 2.2e-16 ***
## ma1       -0.5607821  0.0943371  -5.9444 2.774e-09 ***
## ma2        1.0544966  0.0805469  13.0917 < 2.2e-16 ***
## ma3       -0.4355478  0.1042990  -4.1760 2.967e-05 ***
## intercept -0.0024790  0.0088924  -0.2788    0.7804    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
summary(model3b)
## Series: data.dif1 
## ARIMA(3,0,3) with non-zero mean 
## 
## Coefficients:
##          ar1      ar2      ar3     ma1      ma2     ma3     mean
##       -0.539  -0.2624  -0.3535  0.2424  -0.1426  0.0929  -0.0022
## s.e.   0.355   0.2663   0.2251  0.3647   0.3167  0.2674   0.0088
## 
## sigma^2 = 0.03225:  log likelihood = 40.09
## AIC=-64.18   AICc=-62.92   BIC=-41.68
## 
## Training set error measures:
##                      ME      RMSE       MAE      MPE     MAPE      MASE
## Training set 0.00035259 0.1743931 0.1176408 93.55257 223.6376 0.5660761
##                      ACF1
## Training set -0.004356986
coeftest(model3b)
## 
## z test of coefficients:
## 
##             Estimate Std. Error z value Pr(>|z|)
## ar1       -0.5389594  0.3550048 -1.5182   0.1290
## ar2       -0.2623985  0.2662609 -0.9855   0.3244
## ar3       -0.3535207  0.2251346 -1.5703   0.1164
## ma1        0.2424185  0.3646767  0.6647   0.5062
## ma2       -0.1425794  0.3166551 -0.4503   0.6525
## ma3        0.0929124  0.2673808  0.3475   0.7282
## intercept -0.0021576  0.0087611 -0.2463   0.8055
summary(model3c)
## Series: data.dif1 
## ARIMA(2,0,4) with non-zero mean 
## 
## Coefficients:
##           ar1      ar2      ma1      ma2      ma3     ma4     mean
##       -0.2221  -0.2246  -0.0775  -0.0948  -0.1806  0.0833  -0.0021
## s.e.   0.7578   0.3828   0.7527   0.4811   0.2306  0.1300   0.0080
## 
## sigma^2 = 0.03247:  log likelihood = 39.67
## AIC=-63.35   AICc=-62.09   BIC=-40.85
## 
## Training set error measures:
##                        ME      RMSE      MAE      MPE     MAPE      MASE
## Training set 0.0004435235 0.1750041 0.117434 102.2602 201.7506 0.5650808
##                      ACF1
## Training set -0.003953231
coeftest(model3c)
## 
## z test of coefficients:
## 
##             Estimate Std. Error z value Pr(>|z|)
## ar1       -0.2220988  0.7578407 -0.2931   0.7695
## ar2       -0.2245611  0.3827523 -0.5867   0.5574
## ma1       -0.0774965  0.7526513 -0.1030   0.9180
## ma2       -0.0947915  0.4811441 -0.1970   0.8438
## ma3       -0.1806222  0.2305982 -0.7833   0.4335
## ma4        0.0832568  0.1300067  0.6404   0.5219
## intercept -0.0021277  0.0080407 -0.2646   0.7913

Best Model Selection and Residual Tests

# Residual diagnostics
sisaan <- model3$residuals

# Normality test
shapiro.test(sisaan)
## 
##  Shapiro-Wilk normality test
## 
## data:  sisaan
## W = 0.90191, p-value = 1.858e-07
# Residual independence
Box.test(sisaan, type = "Ljung")
## 
##  Box-Ljung test
## 
## data:  sisaan
## X-squared = 0.2851, df = 1, p-value = 0.5934
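
The Shapiro-Wilk test rejects normality of the residuals (p-value 1.86e-07), while the Ljung-Box test does not reject independence (p-value 0.5934). A normal QQ-plot (a sketch, not part of the original output) gives a visual complement to the Shapiro-Wilk result:

# QQ-plot of the ARIMA(2,1,3) residuals as a visual normality check
qqnorm(sisaan, main = "QQ-Plot Sisaan ARIMA(2,1,3)")
qqline(sisaan, col = "red")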

Forecasting

Plot Forecasting

ramalan <- forecast(Arima(data.ts, order=c(3,1,4),method="ML",include.drift = TRUE),h = length(data.test))
data.ramalan <- ramalan$mean
data.ramalan.ts <- ts(data.ramalan, start = 2022, frequency = 52)
plot(ramalan,col="black",col.sub ="black",col.axis="black",
     col.lab="black",col.main="black",lwd=2)
box(col="black",lwd=2)

Forecast Values

hasilforecast<-matrix(data=c(data.ramalan[1:10]), nrow = 10, ncol = 1)
colnames(hasilforecast)<-c("Hasil Forecast")
head(hasilforecast)
##      Hasil Forecast
## [1,]       7.348465
## [2,]       7.317303
## [3,]       7.234952
## [4,]       7.231979
## [5,]       7.335816
## [6,]       7.345531

Model Validation

error <- data.frame(data.test)-data.frame(data.ramalan[1:32]) 

## SSE (Sum Square Error)
SSE <- sum(error^2, na.rm = T)

## MSE (Mean Squared Error)
MSE<- sapply(error^2, mean, na.rm = T)

## RMSE (Root Mean Square Error)
RMSE <- sqrt(MSE)

## MAD (Mean Absolute Deviation)
MAD <- sapply(abs(error), mean, na.rm = T)

## MAPE (Mean Absolute Percentage Error)
r.error <- (error/data.frame(data.test))*100 # Relative Error
MAPE <- sapply(abs(r.error), mean, na.rm = T)

akurasifarima <- data.frame(
  "Ukuran Keakuratan" = c("SSE", "MSE", "MAPE", "RMSE", "MAD"), 
  "Forecasting" = c(SSE, MSE, MAPE, RMSE, MAD))
akurasifarima
##   Ukuran.Keakuratan Forecasting
## 1               SSE   5.4152528
## 2               MSE   0.1692266
## 3              MAPE   5.1021440
## 4              RMSE   0.4113717
## 5               MAD   0.3486218
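
As an optional cross-check (not in the original workflow), the metric helpers already loaded via MLmetrics and caret can reproduce the test-set MAPE and RMSE; note that all values are on the log scale of the series:

# Cross-check the manual error measures with the loaded metric helpers
MAPE(data.ramalan[1:32], as.numeric(data.test)) * 100
RMSE(data.ramalan[1:32], as.numeric(data.test))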

ARIMAX

Data for Variable X (Holidays)

data_libur <- data$hari_libur

Time Series Data for Variable X

#time series
datalibur.ts <- ts(data_libur, frequency = 52, start = c(2019,1), end = c(2021,52))

Splitting Data

datalibur.train <- ts(data_libur[1:124])
datalibur.test <- ts(data_libur[125:156])
datalibur.train
## Time Series:
## Start = 1 
## End = 124 
## Frequency = 1 
##   [1] 1 0 0 0 0 1 0 0 0 1 0 0 0 1 0 1 0 1 0 1 0 1 1 0 0 0 0 0 0 0 0 1 1 0 1 0 0
##  [38] 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 1 0 0 0 0 0 0 1 0 0 0 1 1 0 1 0 0 1 1 0 1 1
##  [75] 0 0 0 0 0 0 0 0 1 0 1 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1
## [112] 0 0 1 1 0 0 1 0 0 0 1 1 1
#time series data
liburtrain.ts <- ts(datalibur.train, frequency = 52, start = c(2019,1), end = c(2021,20))

liburtest.ts <- ts(datalibur.test, frequency = 52, start = c(2021,21), end = c(2021,52))

Correlation of Variable X

correlation <- cor(data_libur, data1)
correlation
## [1] -0.4287155
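
The negative correlation (-0.43) indicates that holiday weeks tend to have fewer arrivals. A quick descriptive check (not in the original code) is to compare the mean log-arrivals in holiday and non-holiday weeks:

# Mean of the log-arrivals series in non-holiday (0) vs. holiday (1) weeks
tapply(data1, data_libur, mean)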

ARIMAX Model

X1 <- data_libur
y <- data1
reg <- lm(y~X1)
summary(reg)
## 
## Call:
## lm(formula = y ~ X1)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -0.83878 -0.13970  0.01974  0.16662  0.45175 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  7.02950    0.02094 335.690  < 2e-16 ***
## X1          -0.23488    0.03989  -5.889 2.36e-08 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.2226 on 154 degrees of freedom
## Multiple R-squared:  0.1838, Adjusted R-squared:  0.1785 
## F-statistic: 34.68 on 1 and 154 DF,  p-value: 2.356e-08

Residual Diagnostics

sisaanx <- reg$residuals
plot(sisaanx, type="o",ylab="Sisaan", xlab="Order")
abline(h=0,col="red")

Box.test(sisaanx)
## 
##  Box-Pierce test
## 
## data:  sisaanx
## X-squared = 77.822, df = 1, p-value < 2.2e-16

Reject \(H_{0}\), since the p-value is smaller than the significance level. This means the residuals do not yet satisfy the white-noise assumption; the residuals are not independent across lags.
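
Because the regression residuals are autocorrelated, their dependence structure is modeled with ARIMA in the next section. As a compact alternative diagnostic (a sketch, assuming forecast::checkresiduals handles lm objects as documented), a single call plots the residuals, their ACF, and a histogram, and runs a Breusch-Godfrey test:

# Combined residual diagnostics for the regression of y on the holiday dummy
checkresiduals(reg)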

Modeling the Regression Residuals with ARIMA

acf(sisaanx)

pacf(sisaanx)

adf.test(sisaanx)
## 
##  Augmented Dickey-Fuller Test
## 
## data:  sisaanx
## Dickey-Fuller = -2.5093, Lag order = 5, p-value = 0.3641
## alternative hypothesis: stationary
#Differencing
sisaan.diff <- diff(sisaanx, differences = 1)
adf.test(sisaan.diff)
## Warning in adf.test(sisaan.diff): p-value smaller than printed p-value
## 
##  Augmented Dickey-Fuller Test
## 
## data:  sisaan.diff
## Dickey-Fuller = -5.7497, Lag order = 5, p-value = 0.01
## alternative hypothesis: stationary
acf(sisaan.diff, lag.max = 48)

pacf(sisaan.diff, lag.max = 48)

eacf(sisaan.diff)
## AR/MA
##   0 1 2 3 4 5 6 7 8 9 10 11 12 13
## 0 x x x x o o o o o o o  o  o  o 
## 1 x o x x o o o o o o o  o  o  o 
## 2 x o x x x o o o o o o  o  o  o 
## 3 x x o o o o o o o o o  o  o  o 
## 4 x x o o o o o o o o o  o  o  o 
## 5 x x o o o o o o o o o  o  o  o 
## 6 x x o x o o o o o o o  o  o  o 
## 7 x x x o o o o o o o o  o  o  o

From the ACF, PACF, and EACF of the differenced regression residuals, the tentative candidates are ARIMAX(0,1,4), ARIMAX(1,1,4), ARIMAX(2,1,5), ARIMAX(3,1,2), ARIMAX(4,1,2), and ARIMAX(4,1,4).
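
As an optional cross-check of these candidates (not part of the original workflow), forecast::auto.arima can also search the order of a regression-with-ARIMA-errors model when the holiday dummy is passed as xreg; a minimal sketch:

# Automated order search for the ARIMA errors of the regression on the
# holiday dummy; compare with the manually chosen ARIMAX candidates
auto.arima(data.train, xreg = cbind(datalibur.train), d = 1, seasonal = FALSE)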

modelx1 <- Arima(data.train, order = c(0,1,4), xreg = cbind(datalibur.train), method = "ML")
modelx2 <- Arima(data.train, order = c(1,1,4), xreg = cbind(datalibur.train), method = "ML")
modelx3 <- Arima(data.train, order = c(2,1,5), xreg = cbind(datalibur.train), method = "ML")
modelx4 <- Arima(data.train, order = c(3,1,2), xreg = cbind(datalibur.train), method = "ML")
modelx5 <- Arima(data.train, order = c(4,1,2), xreg = cbind(datalibur.train), method = "ML")
modelx6 <- Arima(data.train, order = c(4,1,4), xreg = cbind(datalibur.train), method = "ML")
summary(modelx1)
## Series: data.train 
## Regression with ARIMA(0,1,4) errors 
## 
## Coefficients:
##           ma1      ma2      ma3     ma4     xreg
##       -0.2380  -0.2107  -0.1586  0.2598  -0.1647
## s.e.   0.0849   0.0916   0.1058  0.0945   0.0255
## 
## sigma^2 = 0.02399:  log likelihood = 57.18
## AIC=-102.36   AICc=-101.63   BIC=-85.49
## 
## Training set error measures:
##                        ME      RMSE      MAE         MPE     MAPE    MASE
## Training set -0.002816854 0.1511067 0.107757 -0.07403129 1.572359 0.84377
##                    ACF1
## Training set -0.0145193
coeftest(modelx1)
## 
## z test of coefficients:
## 
##       Estimate Std. Error z value  Pr(>|z|)    
## ma1  -0.238000   0.084918 -2.8027  0.005067 ** 
## ma2  -0.210674   0.091625 -2.2993  0.021488 *  
## ma3  -0.158606   0.105814 -1.4989  0.133895    
## ma4   0.259838   0.094539  2.7485  0.005987 ** 
## xreg -0.164658   0.025505 -6.4558 1.076e-10 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
summary(modelx2)
## Series: data.train 
## Regression with ARIMA(1,1,4) errors 
## 
## Coefficients:
##           ar1      ma1      ma2      ma3     ma4     xreg
##       -0.1035  -0.1475  -0.2354  -0.1749  0.2452  -0.1652
## s.e.   0.2669   0.2509   0.1137   0.1134  0.1053   0.0257
## 
## sigma^2 = 0.02417:  log likelihood = 57.26
## AIC=-100.52   AICc=-99.54   BIC=-80.83
## 
## Training set error measures:
##                        ME      RMSE       MAE         MPE    MAPE      MASE
## Training set -0.002971346 0.1510086 0.1077554 -0.07714613 1.57266 0.8437572
##                      ACF1
## Training set -0.005317544
coeftest(modelx2)
## 
## z test of coefficients:
## 
##       Estimate Std. Error z value  Pr(>|z|)    
## ar1  -0.103549   0.266909 -0.3880   0.69805    
## ma1  -0.147457   0.250935 -0.5876   0.55678    
## ma2  -0.235379   0.113688 -2.0704   0.03842 *  
## ma3  -0.174894   0.113396 -1.5423   0.12299    
## ma4   0.245231   0.105324  2.3284   0.01989 *  
## xreg -0.165168   0.025704 -6.4257 1.313e-10 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
summary(modelx3)
## Series: data.train 
## Regression with ARIMA(2,1,5) errors 
## 
## Coefficients:
## Warning in sqrt(diag(x$var.coef)): NaNs produced
##          ar1     ar2      ma1     ma2      ma3     ma4      ma5     xreg
##       0.3323  -0.404  -0.6012  0.2515  -0.1889  0.2472  -0.1993  -0.1685
## s.e.     NaN     NaN      NaN     NaN      NaN     NaN      NaN   0.0259
## 
## sigma^2 = 0.02402:  log likelihood = 58.61
## AIC=-99.22   AICc=-97.63   BIC=-73.92
## 
## Training set error measures:
##                        ME     RMSE       MAE         MPE     MAPE      MASE
## Training set -0.003765779 0.149266 0.1060721 -0.09154772 1.549315 0.8305764
##                      ACF1
## Training set 6.097265e-05
coeftest(modelx3)
## Warning in sqrt(diag(se)): NaNs produced
## 
## z test of coefficients:
## 
##       Estimate Std. Error z value  Pr(>|z|)    
## ar1   0.332294        NaN     NaN       NaN    
## ar2  -0.404037        NaN     NaN       NaN    
## ma1  -0.601190        NaN     NaN       NaN    
## ma2   0.251549        NaN     NaN       NaN    
## ma3  -0.188873        NaN     NaN       NaN    
## ma4   0.247152        NaN     NaN       NaN    
## ma5  -0.199292        NaN     NaN       NaN    
## xreg -0.168455   0.025856 -6.5152 7.258e-11 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
summary(modelx4)
## Series: data.train 
## Regression with ARIMA(3,1,2) errors 
## 
## Coefficients:
##           ar1      ar2      ar3     ma1     ma2     xreg
##       -0.6058  -0.5023  -0.4250  0.3436  0.1144  -0.1671
## s.e.   0.2208   0.1921   0.0922  0.2382  0.2248   0.0250
## 
## sigma^2 = 0.02356:  log likelihood = 58.75
## AIC=-103.49   AICc=-102.52   BIC=-83.81
## 
## Training set error measures:
##                        ME      RMSE       MAE         MPE     MAPE      MASE
## Training set -0.003178186 0.1491113 0.1059928 -0.08040824 1.547806 0.8299553
##                      ACF1
## Training set -0.001086962
coeftest(modelx4)
## 
## z test of coefficients:
## 
##       Estimate Std. Error z value  Pr(>|z|)    
## ar1  -0.605845   0.220809 -2.7437  0.006074 ** 
## ar2  -0.502340   0.192113 -2.6148  0.008928 ** 
## ar3  -0.425008   0.092175 -4.6109 4.010e-06 ***
## ma1   0.343636   0.238213  1.4426  0.149146    
## ma2   0.114441   0.224758  0.5092  0.610629    
## xreg -0.167057   0.024972 -6.6898 2.234e-11 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
summary(modelx5)
## Series: data.train 
## Regression with ARIMA(4,1,2) errors 
## 
## Coefficients:
##           ar1      ar2      ar3     ar4     ma1     ma2     xreg
##       -0.4322  -0.3983  -0.3527  0.0739  0.1709  0.0586  -0.1667
## s.e.   1.0781   0.7006   0.4552  0.4563  1.0764  0.4528   0.0249
## 
## sigma^2 = 0.02376:  log likelihood = 58.76
## AIC=-101.51   AICc=-100.25   BIC=-79.02
## 
## Training set error measures:
##                        ME      RMSE       MAE         MPE     MAPE      MASE
## Training set -0.003144319 0.1490994 0.1060267 -0.07973718 1.548264 0.8302211
##                      ACF1
## Training set -0.001741433
coeftest(modelx5)
## 
## z test of coefficients:
## 
##       Estimate Std. Error z value  Pr(>|z|)    
## ar1  -0.432175   1.078068 -0.4009    0.6885    
## ar2  -0.398316   0.700640 -0.5685    0.5697    
## ar3  -0.352716   0.455191 -0.7749    0.4384    
## ar4   0.073873   0.456309  0.1619    0.8714    
## ma1   0.170884   1.076400  0.1588    0.8739    
## ma2   0.058642   0.452775  0.1295    0.8969    
## xreg -0.166748   0.024945 -6.6847 2.313e-11 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
summary(modelx6)
## Series: data.train 
## Regression with ARIMA(4,1,4) errors 
## 
## Coefficients:
##          ar1     ar2      ar3     ar4      ma1      ma2      ma3      ma4
##       0.2404  0.0067  -0.0907  0.5050  -0.5493  -0.1924  -0.0183  -0.2246
## s.e.  0.2180  0.2374   0.2322  0.1755   0.2512   0.3090   0.2784   0.2834
##          xreg
##       -0.1638
## s.e.   0.0254
## 
## sigma^2 = 0.023:  log likelihood = 61.12
## AIC=-102.25   AICc=-100.28   BIC=-74.12
## 
## Training set error measures:
##                       ME      RMSE       MAE        MPE     MAPE      MASE
## Training set -0.01109985 0.1454113 0.1015751 -0.2029051 1.485237 0.7953633
##                      ACF1
## Training set -0.006510692
coeftest(modelx6)
## 
## z test of coefficients:
## 
##        Estimate Std. Error z value  Pr(>|z|)    
## ar1   0.2403722  0.2180117  1.1026  0.270216    
## ar2   0.0066621  0.2373958  0.0281  0.977612    
## ar3  -0.0906537  0.2322258 -0.3904  0.696264    
## ar4   0.5050464  0.1754706  2.8782  0.003999 ** 
## ma1  -0.5492535  0.2511619 -2.1869  0.028753 *  
## ma2  -0.1923766  0.3090342 -0.6225  0.533607    
## ma3  -0.0182897  0.2783858 -0.0657  0.947617    
## ma4  -0.2245517  0.2834235 -0.7923  0.428196    
## xreg -0.1638069  0.0253883 -6.4521 1.103e-10 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

AIC and BIC comparison of the six ARIMAX candidates:

# AIC and BIC of the ARIMAX candidates
modelaccuracyX<-data.frame(
  "Model"=c("ARIMAX(0,1,4)","ARIMAX(1,1,4)", "ARIMAX(2,1,5)", "ARIMAX(3,1,2)", "ARIMAX(4,1,2)", "ARIMAX(4,1,4)"),
  "AIC"=c(modelx1$aic,modelx2$aic,modelx3$aic,modelx4$aic,modelx5$aic,modelx6$aic),
  "BIC"=c(modelx1$bic,modelx2$bic,modelx3$bic,modelx4$bic,modelx5$bic,modelx6$bic))

modelaccuracyX
##           Model       AIC       BIC
## 1 ARIMAX(0,1,4) -102.3590 -85.48590
## 2 ARIMAX(1,1,4) -100.5164 -80.83109
## 3 ARIMAX(2,1,5)  -99.2250 -73.91534
## 4 ARIMAX(3,1,2) -103.4939 -83.80862
## 5 ARIMAX(4,1,2) -101.5128 -79.01536
## 6 ARIMAX(4,1,4) -102.2451 -74.12321

Based on AIC, the selected model is ARIMAX(3,1,2).

Overfitting: ARIMAX(4,1,2) and ARIMAX(3,1,3)

modelx4b <- Arima(data.train, order = c(4,1,2), xreg = cbind(datalibur.train), method = "ML")
modelx4c <- Arima(data.train, order = c(3,1,3), xreg = cbind(datalibur.train), method = "ML")
summary(modelx4b)
## Series: data.train 
## Regression with ARIMA(4,1,2) errors 
## 
## Coefficients:
##           ar1      ar2      ar3     ar4     ma1     ma2     xreg
##       -0.4322  -0.3983  -0.3527  0.0739  0.1709  0.0586  -0.1667
## s.e.   1.0781   0.7006   0.4552  0.4563  1.0764  0.4528   0.0249
## 
## sigma^2 = 0.02376:  log likelihood = 58.76
## AIC=-101.51   AICc=-100.25   BIC=-79.02
## 
## Training set error measures:
##                        ME      RMSE       MAE         MPE     MAPE      MASE
## Training set -0.003144319 0.1490994 0.1060267 -0.07973718 1.548264 0.8302211
##                      ACF1
## Training set -0.001741433
coeftest(modelx4b)
## 
## z test of coefficients:
## 
##       Estimate Std. Error z value  Pr(>|z|)    
## ar1  -0.432175   1.078068 -0.4009    0.6885    
## ar2  -0.398316   0.700640 -0.5685    0.5697    
## ar3  -0.352716   0.455191 -0.7749    0.4384    
## ar4   0.073873   0.456309  0.1619    0.8714    
## ma1   0.170884   1.076400  0.1588    0.8739    
## ma2   0.058642   0.452775  0.1295    0.8969    
## xreg -0.166748   0.024945 -6.6847 2.313e-11 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
summary(modelx4c)
## Series: data.train 
## Regression with ARIMA(3,1,3) errors 
## 
## Coefficients:
##           ar1      ar2      ar3     ma1     ma2     ma3     xreg
##       -0.6098  -0.4962  -0.4700  0.3500  0.1131  0.0602  -0.1660
## s.e.   0.2113   0.1937   0.2049  0.2335  0.2279  0.2517   0.0252
## 
## sigma^2 = 0.02376:  log likelihood = 58.77
## AIC=-101.55   AICc=-100.29   BIC=-79.05
## 
## Training set error measures:
##                        ME      RMSE       MAE         MPE     MAPE      MASE
## Training set -0.003116757 0.1490762 0.1061362 -0.07915059 1.549883 0.8310782
##                      ACF1
## Training set -0.002702917
coeftest(modelx4c)
## 
## z test of coefficients:
## 
##       Estimate Std. Error z value  Pr(>|z|)    
## ar1  -0.609830   0.211292 -2.8862  0.003899 ** 
## ar2  -0.496207   0.193703 -2.5617  0.010416 *  
## ar3  -0.469959   0.204900 -2.2936  0.021814 *  
## ma1   0.350033   0.233494  1.4991  0.133845    
## ma2   0.113131   0.227888  0.4964  0.619589    
## ma3   0.060217   0.251722  0.2392  0.810936    
## xreg -0.165971   0.025232 -6.5778 4.773e-11 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Neither overfitted model improves on the selected model (both have higher AIC and the added coefficients are not significant), so ARIMAX(3,1,2) is retained.

Model Diagnostics

sisaan.arimax <- modelx4$residuals
# White-noise check (residual independence)
Box.test(sisaan.arimax, lag = 12)
## 
##  Box-Pierce test
## 
## data:  sisaan.arimax
## X-squared = 3.2909, df = 12, p-value = 0.9931
Box.test(sisaan.arimax, lag = 24)
## 
##  Box-Pierce test
## 
## data:  sisaan.arimax
## X-squared = 13.725, df = 24, p-value = 0.9526
Box.test(sisaan.arimax, lag = 36)
## 
##  Box-Pierce test
## 
## data:  sisaan.arimax
## X-squared = 21.016, df = 36, p-value = 0.978
Box.test(sisaan.arimax, lag = 48)
## 
##  Box-Pierce test
## 
## data:  sisaan.arimax
## X-squared = 32.619, df = 48, p-value = 0.9562

Fail to reject \(H_{0}\): the residuals behave as white noise, i.e., the residuals are independent across lags.
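
The Box-Pierce statistics above do not adjust their degrees of freedom for the estimated coefficients. A hedged alternative (assuming the usual fitdf adjustment for the p + q = 5 ARMA coefficients of ARIMAX(3,1,2)) is the Ljung-Box form:

# Ljung-Box test with degrees of freedom reduced by the 5 estimated ARMA terms
Box.test(sisaan.arimax, lag = 24, type = "Ljung-Box", fitdf = 5)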

plot(sisaan.arimax, type="o",ylab="Sisaan", xlab="Order",main="Plot Sisaan vs Order")
abline(h=0,col="red")

acf(sisaan.arimax, lag.max = 24, main = "Plot ACF Sisaan Model ARIMAX") 

pacf(sisaan.arimax, lag.max = 24, main = "Plot PACF Sisaan Model ARIMAX")

Forecasting

ramalanx <- forecast(modelx4, xreg = datalibur.test)

data.ramalanx <- ramalanx$mean
data.ramalan.tsx <- ts(data.ramalanx, frequency = 52, start = c(2021, 21))
plot(ramalanx,col="black",col.sub ="black",col.axis="black",
     col.lab="black",col.main="black",lwd=2)
box(col="black",lwd=2)

ts.plot(data.ts,xlab = "Periode", ylab = "Data Jumlah Kedatangan", col="black",lwd=2,main="Forecasting ARIMAX(3,1,2)",gpars = list(col.main="black",col.axis="black",col.sub="black"))
lines(data.ramalan.tsx, col = "blue",lwd=2)
lines(testing.ts, col = "red", lwd =2)
legend("bottomleft",c("Data Training", "Data Testing", "Data Forecast ARIMAX(3,1,1)"), 
       lwd=2, col=c("black","red","blue"), cex=0.8)
box(col="black",lwd=2)

ramalanx <- matrix(c(data.ramalanx[1:30]), nrow = 30, ncol = 1, byrow = FALSE)
colnames(ramalanx) <- c("Ramalan")
ramalanx
##        Ramalan
##  [1,] 6.709103
##  [2,] 6.761674
##  [3,] 6.830713
##  [4,] 6.882531
##  [5,] 6.878033
##  [6,] 6.896386
##  [7,] 6.865504
##  [8,] 6.876906
##  [9,] 6.710654
## [10,] 6.884621
## [11,] 6.875184
## [12,] 6.710031
## [13,] 6.710681
## [14,] 6.880399
## [15,] 6.877651
## [16,] 6.877703
## [17,] 6.877921
## [18,] 6.878931
## [19,] 6.878187
## [20,] 6.878038
## [21,] 6.878073
## [22,] 6.711385
## [23,] 6.878264
## [24,] 6.878172
## [25,] 6.878160
## [26,] 6.878289
## [27,] 6.878256
## [28,] 6.878216
## [29,] 6.878202
## [30,] 6.878245
error <- data.frame(data.test)-data.frame(data.ramalanx[1:32]) 

## SSE (Sum Square Error)
SSE <- sum(error^2, na.rm = T)

## MSE (Mean Squared Error)
MSE<- sapply(error^2, mean, na.rm = T)

## RMSE (Root Mean Square Error)
RMSE <- sqrt(MSE)

## MAD (Mean Absolute Deviation)
MAD <- sapply(abs(error), mean, na.rm = T)

## MAPE (Mean Absolute Percentage Error)
r.error <- (error/data.frame(data.test))*100 # Relative Error
MAPE <- sapply(abs(r.error), mean, na.rm = T)

akurasifarimax <- data.frame(
  "Ukuran Keakuratan" = c("SSE", "MSE", "MAPE", "RMSE", "MAD"), 
  "Forecasting" = c(SSE, MSE, MAPE, RMSE, MAD))
akurasifarimax
##   Ukuran.Keakuratan Forecasting
## 1               SSE  2.14776444
## 2               MSE  0.06711764
## 3              MAPE  2.80274889
## 4              RMSE  0.25907072
## 5               MAD  0.20000282
forecasterror <- data.frame("Ukuran Keakuratan" = akurasifarima$Ukuran.Keakuratan, "Akurasi ARIMA" = akurasifarima$Forecasting , "Akurasi ARIMAX" = akurasifarimax$Forecasting)

kable(forecasterror)
Ukuran.Keakuratan Akurasi.ARIMA Akurasi.ARIMAX
SSE 5.4152528 2.1477644
MSE 0.1692266 0.0671176
MAPE 5.1021440 2.8027489
RMSE 0.4113717 0.2590707
MAD 0.3486218 0.2000028
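
Both accuracy columns are computed on the log scale, since data1 is log(total_kedatangan); on every measure the ARIMAX model with the holiday dummy substantially reduces the forecast error of the plain ARIMA forecast (e.g., MAPE falls from 5.10 to 2.80). If forecasts are needed on the original arrival scale, a simple back-transform (a sketch that ignores any log-bias correction) is:

# Back-transform the ARIMAX point forecasts from the log scale to arrivals
ramalan_kedatangan <- exp(data.ramalanx)
head(ramalan_kedatangan)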