Packages and Libraries

library(readxl)
library(TTR)
library(forecast)
## Registered S3 method overwritten by 'quantmod':
##   method            from
##   as.zoo.data.frame zoo
library(tseries)
library(TSA)
## Registered S3 methods overwritten by 'TSA':
##   method       from    
##   fitted.Arima forecast
##   plot.Arima   forecast
## 
## Attaching package: 'TSA'
## The following objects are masked from 'package:stats':
## 
##     acf, arima
## The following object is masked from 'package:utils':
## 
##     tar
library(dynlm)
## Loading required package: zoo
## 
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
## 
##     as.Date, as.Date.numeric
library(lmtest)
library(imputeTS)
## 
## Attaching package: 'imputeTS'
## The following object is masked from 'package:zoo':
## 
##     na.locf
## The following object is masked from 'package:tseries':
## 
##     na.remove
library(stats)
library(MASS)
library(kableExtra)
library(padr)
library(astsa)
## 
## Attaching package: 'astsa'
## The following object is masked from 'package:forecast':
## 
##     gas
library(tfarima)
## 
## Attaching package: 'tfarima'
## The following object is masked from 'package:TSA':
## 
##     spec
## The following objects are masked from 'package:forecast':
## 
##     easter, seasadj
library(ggplot2)

Import Data

dataset <- read_excel("~/Downloads/Data Inflasi-2.xlsx")
dataset$`Data Inflasi` <- as.numeric(dataset$`Data Inflasi`)
datakurs <- dataset$`Kurs Jual`
datainflasi <- dataset$`Data Inflasi`
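Because `Data Inflasi` is coerced with as.numeric(), any non-numeric cells silently become NA. A quick check, plus an optional interpolation with imputeTS (loaded above), is sketched below; the interpolation step is an assumption, not part of the original workflow.

# Count NA values introduced by the as.numeric() coercion above
sum(is.na(dataset$`Data Inflasi`))

# Optional (assumption, not part of the original analysis): fill any gaps
# by linear interpolation using imputeTS
datainflasi <- na_interpolation(dataset$`Data Inflasi`, option = "linear")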

Converting the Data to Time Series

data.ts <- ts(datakurs)
datai.ts <- ts(datainflasi)
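Both series are created without a start date or frequency, so the time axis is simply the observation index 1..130. If the observations were, say, monthly (an assumption; the source file does not state the sampling frequency), calendar information could be attached as sketched below. This is illustrative only; the rest of the document keeps the index-based series defined above.

# Hypothetical: monthly data starting January 2012 (start date is an assumption)
data.ts.monthly  <- ts(datakurs,    start = c(2012, 1), frequency = 12)
datai.ts.monthly <- ts(datainflasi, start = c(2012, 1), frequency = 12)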

Data Exploration

Plot of the Selling Rate (Kurs Jual) Data

plot(data.ts,xlab ="Waktu", ylab = "Data Kurs (Ribu)", col="black", main = "Plot Data Deret Waktu Data Kurs")
points(data.ts)

Plot of the Inflation Data

plot(datai.ts,xlab ="Waktu", ylab = "Data Inflasi (Persen)", col="black", main = "Plot Data Inflasi (Persen)")
points(datai.ts)

Splitting the Data (Training and Testing)

Splitting the Selling Rate (Kurs Jual) Data

data.train <- ts(datakurs[1:96])
data.test <- ts(datakurs[97:130], start = 97)
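The split keeps the first 96 observations (about 74%) for training and the remaining 34 for testing. The same split can also be written with window() on the ts object created earlier; this is a sketch with illustrative names, equivalent to the indexing above.

# Equivalent split via window(); data.ts uses the default start = 1
data.train.w <- window(data.ts, end = 96)
data.test.w  <- window(data.ts, start = 97)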

Plot of the Selling Rate (Kurs Jual) Data

par(mfrow=c(2,1))
plot(data.train, xlab ="Waktu", ylab = "Data Kurs (Ribu)", col="red", main = "Plot Data Training")
points(data.train)

plot(data.test, xlab ="Waktu", ylab = "Data Kurs (Ribu)", col="red", main = "Plot Data Testing")
points(data.test)

Plot of the Training/Testing Split

## Plot of the exchange-rate training and testing data
ts.plot(data.ts, xlab = "Periode", ylab = "Data Kurs (Ribu)", 
        main = "Plot Data Training dan Data Testing")
lines(data.train, col = "blue")
lines(data.test, col="Red")
legend(-0.9,22100,c("Data Training","Data Testing"), 
       lty=8, col=c("blue","red"), cex=0.8)
abline(v=97, col=c("black"), lty=1, lwd=1)

Splitting the Inflation Data

datai.train <- ts(datainflasi[1:96])
datai.test <- ts(datainflasi[97:130], start = 97)

Plot of the Inflation Data

par(mfrow=c(2,1))
plot(datai.train,xlab ="Waktu", ylab = "Data Inflasi (Persen)", col="red", main = "Data Training Inflasi")
points(datai.train)

plot(datai.test,xlab ="Waktu", ylab = "Data Inflasi (Persen)", col="red", main = "Data Testing Inflasi")
points(datai.test)

Plot of the Training/Testing Split

ts.plot(datai.ts, xlab = "Periode", ylab ="Data Inflasi (Persen)", 
        main = "Plot Data Training dan Data Testing")
lines(datai.train, col = "blue")
lines(datai.test, col="Red")
legend(-0.9,3,c("Data Training","Data Testing"), 
       lty=8, col=c("blue","red"), cex=0.8)
abline(v=97, col=c("black"), lty=1, lwd=1)

Data Smoothing

DMA (Double Moving Average)

iterasi_dma <- function(x, min, max, metode = c("SSE","MSE","RMSE","MAD","MAPE")){
  # Try double moving averages of order min..max and collect the accuracy
  # measures for each candidate n
  tabledma <- data.frame()
  z <- max - min + 1
  temp <- sqrt(z)
  a <- round(temp) 
  b <- a
  if(a*b < z){
    b <- b + 1
  }
  par(mfrow = c(a, b))                      # plotting grid sized to the number of candidate n
  for(i in min:max) {
    df_ts_sma <- SMA(x, n = i)              # single moving average
    df_ts_dma <- SMA(df_ts_sma, n = i)      # second (double) moving average
    At <- 2*df_ts_sma - df_ts_dma
    Bt <- df_ts_sma - df_ts_dma
    dmasmoothing <- At + Bt                 # smoothed value
    ramal_dma <- c(NA, dmasmoothing)        # one-step-ahead forecast
    df_dma <- cbind(df_aktual = c(x, NA), dmasmoothing = c(dmasmoothing, NA), ramal_dma)
    error.dma <- df_dma[, 1] - df_dma[, 3]
    SSE.dma <- sum(error.dma^2, na.rm = TRUE)
    MSE.dma <- mean(error.dma^2, na.rm = TRUE)
    RMSE.dma <- sqrt(mean(error.dma^2, na.rm = TRUE))
    MAD.dma <- mean(abs(error.dma), na.rm = TRUE)
    r.error.dma <- (error.dma/df_dma[, 1])*100   # relative error (%)
    MAPE.dma <- mean(abs(r.error.dma), na.rm = TRUE)
    ak <- data.frame(n = i, SSE = SSE.dma, MSE = MSE.dma, RMSE = RMSE.dma,
                     MAD = MAD.dma, MAPE = MAPE.dma)
    tabledma <- rbind(tabledma, ak)
  }
  # order n that minimises the chosen accuracy measure
  opt <- tabledma$n[which.min(tabledma[, metode])]
  return(list(Akurasi = tabledma,
              n.optimum = paste("Nilai n optimum dengan menggunakan metode Double Moving Average berada pada n =", opt),
              smoothing = dmasmoothing, ramalan = ramal_dma))
}
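As implemented in the function above, with $S'_t$ the single moving average and $S''_t$ the second moving average of order $N$, the double moving average quantities are

$$A_t = 2S'_t - S''_t, \qquad B_t = S'_t - S''_t, \qquad \hat{Y}_{t+1} = A_t + B_t,$$

so the smoothed value at time $t$ is $A_t + B_t$ and the one-step-ahead forecast is that value shifted forward one period.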

dma_iter<-iterasi_dma(data.train,2,30, metode= "MAPE")
dma_iter[[2]]
## [1] "Nilai n optimum dengan menggunakan metode Double Moving Average berada pada n = 2"
# Single Moving Average with N = 2
df_ts_sma <- SMA(data.train,  n=2)

# Double Moving Average with N = 2
df_ts_dma <- SMA(df_ts_sma, n=2)
At <- c(2*df_ts_sma-df_ts_dma)
Bt <- c(df_ts_sma-df_ts_dma)
pemulusan_dma <- c(At+Bt)
ramal_dma <- c(NA, pemulusan_dma)
df_dma <- cbind(df_aktual=c(data.train,NA), pemulusan_dma=c(pemulusan_dma,NA), ramal_dma)
head(df_dma)
##      df_aktual pemulusan_dma ramal_dma
## [1,]   14145.3            NA        NA
## [2,]   14342.1            NA        NA
## [3,]   14631.5      14729.90        NA
## [4,]   14913.0      15057.70  14729.90
## [5,]   14463.4      14604.15  15057.70
## [6,]   14739.1      14514.30  14604.15

Plot DMA

ts.plot(data.train, xlab="Tahun", ylab="Data Kurs", col="blue", lty=3)
points(data.train)

pemulusan_dma.ts<-ts(pemulusan_dma)
ramal_dma.ts <- ts(ramal_dma)

lines(pemulusan_dma.ts,col="red",lwd=2)
lines(ramal_dma.ts,col="black",lwd=2)
title("Double Moving Average N = 2", cex.main=1, font.main=4 ,col.main="black")
legend("topleft", c("Data Aktual","Pemulusan","Ramalan"),lty=3:1,col=c ("blue","red","black"))

Accuracy and Error Measures

# Accuracy measures
error.dma <- df_dma[, 1] - df_dma[, 3]

## SSE (Sum Square Error)
SSE.dma <- sum(error.dma^2, na.rm = T)

## MSE (Mean Squared Error)
MSE.dma <- mean(error.dma^2, na.rm = T)

## RMSE (Root Mean Square Error)
RMSE.dma <- sqrt(mean(error.dma^2, na.rm = T))

## MAD (Mean Absolute Deviation)
MAD.dma <- mean(abs(error.dma), na.rm = T)

## MAPE (Mean Absolute Percentage Error)
r.error.dma <- (error.dma/df_dma[, 1])*100 # Relative Error
MAPE.dma <- mean(abs(r.error.dma), na.rm = T)


akurasidma <- data.frame(
          "Ukuran Keakuratan" = c("SSE", "MSE", "MAPE", "RMSE", "MAD"), 
          "Double Moving Average N=2" = c(SSE.dma, MSE.dma, MAPE.dma, RMSE.dma, MAD.dma))
kable(akurasidma)
Ukuran Keakuratan   Double Moving Average N = 2
SSE                 3.588420e+07
MSE                 3.858517e+05
MAPE                2.641975e+00
RMSE                6.211696e+02
MAD                 4.766640e+02
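For reference, with one-step forecast errors $e_t = Y_t - \hat{Y}_t$, the accuracy measures computed above are

$$\mathrm{SSE} = \sum_t e_t^2,\quad \mathrm{MSE} = \frac{1}{n}\sum_t e_t^2,\quad \mathrm{RMSE} = \sqrt{\mathrm{MSE}},\quad \mathrm{MAD} = \frac{1}{n}\sum_t |e_t|,\quad \mathrm{MAPE} = \frac{100}{n}\sum_t \left|\frac{e_t}{Y_t}\right|,$$

where $n$ is the number of non-missing errors.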

DES (Double Exponential Smoothing)

DES <- HoltWinters(data.train, gamma = FALSE)
tabledes <- data.frame("Alpha Optimum"=c(DES$alpha), "Beta Optimum"=c(DES$beta))
kable(tabledes)
        Alpha Optimum   Beta Optimum
alpha       0.9843361      0.0296052
ramalanDES <- forecast(DES, h=34)
ramalanDES
##     Point Forecast    Lo 80    Hi 80     Lo 95    Hi 95
##  97       18434.78 17688.37 19181.19 17293.241 19576.32
##  98       18457.50 17394.78 19520.23 16832.206 20082.80
##  99       18480.22 17163.02 19797.42 16465.741 20494.71
## 100       18502.95 16961.85 20044.04 16146.043 20859.85
## 101       18525.67 16779.18 20272.15 15854.649 21196.69
## 102       18548.39 16608.89 20487.89 15582.177 21514.60
## 103       18571.11 16447.37 20694.85 15323.124 21819.10
## 104       18593.83 16292.31 20895.36 15073.951 22113.71
## 105       18616.55 16142.12 21090.99 14832.234 22400.87
## 106       18639.28 15995.67 21282.88 14596.237 22682.31
## 107       18662.00 15852.13 21471.87 14364.670 22959.32
## 108       18684.72 15710.83 21658.61 14136.544 23232.89
## 109       18707.44 15571.28 21843.61 13911.091 23503.79
## 110       18730.16 15433.07 22027.25 13687.695 23772.63
## 111       18752.88 15295.89 22209.88 13465.860 24039.91
## 112       18775.61 15159.45 22391.76 13245.180 24306.03
## 113       18798.33 15023.56 22573.10 13025.315 24571.34
## 114       18821.05 14888.01 22754.09 12805.984 24836.11
## 115       18843.77 14752.65 22934.89 12586.947 25100.59
## 116       18866.49 14617.36 23115.63 12368.002 25364.98
## 117       18889.21 14482.01 23296.42 12148.974 25629.45
## 118       18911.94 14346.51 23477.37 11929.714 25894.16
## 119       18934.66 14210.77 23658.55 11710.092 26159.22
## 120       18957.38 14074.72 23840.04 11489.994 26424.76
## 121       18980.10 13938.29 24021.91 11269.323 26690.88
## 122       19002.82 13801.44 24204.21 11047.991 26957.65
## 123       19025.54 13664.10 24386.99 10825.923 27225.17
## 124       19048.27 13526.24 24570.30 10603.052 27493.48
## 125       19070.99 13387.81 24754.17 10379.318 27762.66
## 126       19093.71 13248.79 24938.63 10154.669 28032.75
## 127       19116.43 13109.13 25123.73  9929.059 28303.80
## 128       19139.15 12968.82 25309.48  9702.445 28575.86
## 129       19161.87 12827.83 25495.92  9474.792 28848.96
## 130       19184.60 12686.14 25683.05  9246.067 29123.13
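HoltWinters() with gamma = FALSE fits Holt's linear (double exponential) smoothing, with level and trend updated as

$$\ell_t = \alpha y_t + (1-\alpha)(\ell_{t-1} + b_{t-1}), \qquad b_t = \beta(\ell_t - \ell_{t-1}) + (1-\beta)\, b_{t-1}, \qquad \hat{y}_{t+h} = \ell_t + h\, b_t,$$

using the optimised $\hat\alpha = 0.9843$ and $\hat\beta = 0.0296$ reported above; the point forecasts in the table are $\hat{y}_{96+h}$ for $h = 1,\dots,34$.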

Plot DES

plot(DES)
points(data.train)
legend("topright", c("Data aktual","Pemulusan DES"),lty=1:2,col=c ("black","red"))

Accuracy and Error Measures

# Accuracy measures
SSEDES<-DES$SSE 

MSEDES<-DES$SSE/length(data.train) 

RMSEDES<-sqrt(MSEDES)

# Compute MAPE manually
fittedDES<-ramalanDES$fitted 
sisaanDES<-ramalanDES$residuals 
residDES<-data.train-ramalanDES$fitted 
MAPEDES <-sum(abs(sisaanDES[3:length(data.train)]/data.train[3:length(data.train)])*100)/length(data.train)

akurasiDES <- data.frame(
          "Ukuran Keakuratan Data Training" = c("SSE", "MSE", "RMSE", "MAPE"), 
          "Double Exponential Smoothing" = c(SSEDES, MSEDES, RMSEDES, MAPEDES))
kable(akurasiDES)
Ukuran Keakuratan Data Training   Double Exponential Smoothing
SSE                               3.192736e+07
MSE                               3.325767e+05
RMSE                              5.766946e+02
MAPE                              2.309316e+00

Comparison of DMA and DES

akurasiDESDMA <- data.frame(
          "Ukuran Keakuratan Data Training" = c("SSE", "MSE", "RMSE", "MAPE"), 
          "Double Exponential Smoothing" = c(SSEDES, MSEDES, RMSEDES, MAPEDES),
          "Double Moving Average N = 2" = c(SSE.dma, MSE.dma, RMSE.dma, MAPE.dma))
kable(akurasiDESDMA)
Ukuran Keakuratan Data Training   Double Exponential Smoothing   Double Moving Average N = 2
SSE                               3.192736e+07                   3.588420e+07
MSE                               3.325767e+05                   3.858517e+05
RMSE                              5.766946e+02                   6.211696e+02
MAPE                              2.309316e+00                   2.641975e+00

Accuracy and Error Measures (Testing Data)

selisih.test <- c(ramalanDES$mean)-c(data.test)

SSEtesting<-sum(selisih.test^2) 
MSEtesting<-SSEtesting/length(data.test) 
RMSEtesting<-sqrt(MSEtesting)
MADtesting<-sum(abs(selisih.test))/length(data.test)   # mean absolute deviation of the test errors

akurasi.test <- data.frame(
          "Perbandingan Ukuran Keakuratan Data Testing" = c("SSE", "MSE", "RMSE", "MAD"),
          "Double Exponential Smoothing" = c(SSEtesting, MSEtesting, RMSEtesting, MADtesting))
kable(akurasi.test)
Perbandingan Ukuran Keakuratan Data Testing   Double Exponential Smoothing
SSE                                           3.035553e+07
MSE                                           8.928097e+05
RMSE                                          9.448861e+02
MAD                                           7.774000e+02

ARIMA Modeling

First Differencing

data.dif1 <- diff(data.train, differences = 1) 
plot.ts(data.dif1, lty=1, ylab = "Data Kurs Pembedaan 1", main = "Plot Differencing Data Kurs")

Stationarity Test

adf.test(data.dif1)
## 
##  Augmented Dickey-Fuller Test
## 
## data:  data.dif1
## Dickey-Fuller = -3.7848, Lag order = 4, p-value = 0.02279
## alternative hypothesis: stationary

ACF Plot

acfdif <- acf(data.dif1, lag.max = 24)

PACF Plot

pacfdif <- pacf(data.dif1, lag.max = 24)

EACF

eacf(data.dif1)
## AR/MA
##   0 1 2 3 4 5 6 7 8 9 10 11 12 13
## 0 o o o o o o o o o o o  o  o  o 
## 1 o o o o o o o o o o o  o  o  o 
## 2 o x o o o o o o o o o  o  o  o 
## 3 o x x o o o o o o o o  o  o  o 
## 4 x x x o o o o o o o o  o  o  o 
## 5 x o x o x o o o o o o  o  o  o 
## 6 x o x x o o o o o o o  o  o  o 
## 7 x x o x o o o o o o o  o  o  o

Second Differencing

data.dif2 <- diff(data.train, differences = 2) 
plot.ts(data.dif2, lty=1, ylab = "Data Kurs Pembedaan 2", main = "Plot Differencing Data Kurs")

Stationarity Check

adf.test(data.dif2)
## Warning in adf.test(data.dif2): p-value smaller than printed p-value
## 
##  Augmented Dickey-Fuller Test
## 
## data:  data.dif2
## Dickey-Fuller = -6.0058, Lag order = 4, p-value = 0.01
## alternative hypothesis: stationary

ACF Plot

acfdif <- acf(data.dif2, lag.max = 24)

The ACF cuts off after lag 1.

PACF Plot

pacfdif <- pacf(data.dif2, lag.max = 24)

The PACF cuts off at an early lag.

EACF

eacf(data.dif2)
## AR/MA
##   0 1 2 3 4 5 6 7 8 9 10 11 12 13
## 0 x o o o o o o o o o o  o  o  o 
## 1 x x o o o o o o o o o  o  o  o 
## 2 x x o o o o o o o o o  o  o  o 
## 3 x o x x o o o o o o o  o  o  o 
## 4 x o o x o o o o o o o  o  o  o 
## 5 x o o x o o o o o o o  o  o  o 
## 6 x o o x o x o o o o o  o  o  o 
## 7 x x o x o x o o o o o  o  o  o

Candidate models obtained from the ACF, PACF, and EACF: 1. ARIMA(0,2,1), 2. ARIMA(1,2,2), 3. ARIMA(2,2,3), 4. ARIMA(1,2,0), 5. ARIMA(1,2,1). An automated cross-check is sketched below.
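As an optional cross-check of the manually identified candidates (not part of the original identification), auto.arima() from the forecast package, already loaded above, can search over (p, q) with the difference order fixed at 2 on the training series.

# Sketch: automated order search with d fixed at 2
auto.arima(data.train, d = 2, seasonal = FALSE, stepwise = FALSE, approximation = FALSE)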

Model Fitting

model1 <- Arima(data.dif1, order=c(0,2,1), method="ML")   
model2 <- Arima(data.dif1, order=c(1,2,2), method="ML")  
model3 <- Arima(data.dif1, order=c(2,2,3), method="ML")
model4 <- Arima(data.dif1, order=c(1,2,0), method="ML") 
model5 <- Arima(data.dif1, order=c(1,2,1), method="ML")
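Note that these candidates are estimated on the already differenced series data.dif1 while the order also specifies d = 2, so the original series is in effect differenced three times. The Forecasting section below instead fits the chosen order directly to data.train; for comparison, that direct specification is sketched here (model5_direct is an illustrative name, not used elsewhere).

# Sketch: fit the chosen order directly to the original training series,
# matching the call used later in the Forecasting section
model5_direct <- Arima(data.train, order = c(1,2,1), method = "ML")
summary(model5_direct)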

Parameter Estimation

Arima(0,2,1)

summary(model1) 
## Series: data.dif1 
## ARIMA(0,2,1) 
## 
## Coefficients:
##           ma1
##       -1.0000
## s.e.   0.0265
## 
## sigma^2 = 686677:  log likelihood = -758.67
## AIC=1521.34   AICc=1521.48   BIC=1526.41
## 
## Training set error measures:
##                    ME     RMSE      MAE      MPE     MAPE      MASE       ACF1
## Training set 3.284043 815.4705 619.3993 840.8557 988.0674 0.9863497 -0.5543161
lmtest::coeftest((model1)) 
## 
## z test of coefficients:
## 
##      Estimate Std. Error z value  Pr(>|z|)    
## ma1 -1.000000   0.026518 -37.711 < 2.2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Arima(1,2,2)

summary(model2) 
## Series: data.dif1 
## ARIMA(1,2,2) 
## 
## Coefficients:
##           ar1      ma1     ma2
##       -0.0236  -1.9413  0.9414
## s.e.   0.1206   0.1200  0.1183
## 
## sigma^2 = 356296:  log likelihood = -730.85
## AIC=1469.7   AICc=1470.16   BIC=1479.83
## 
## Training set error measures:
##                    ME     RMSE      MAE      MPE     MAPE      MASE        ACF1
## Training set 8.560327 580.9854 438.2293 91.75477 143.0967 0.6978493 -0.01477678
lmtest::coeftest((model2)) 
## 
## z test of coefficients:
## 
##      Estimate Std. Error  z value  Pr(>|z|)    
## ar1 -0.023606   0.120591  -0.1958    0.8448    
## ma1 -1.941303   0.120004 -16.1769 < 2.2e-16 ***
## ma2  0.941420   0.118314   7.9570 1.763e-15 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Arima(2,2,3)

summary(model3) 
## Series: data.dif1 
## ARIMA(2,2,3) 
## 
## Coefficients:
##           ar1     ar2      ma1      ma2     ma3
##       -0.7324  0.0774  -1.2332  -0.4808  0.7143
## s.e.   0.2985  0.1213   0.3135   0.5440  0.2876
## 
## sigma^2 = 354616:  log likelihood = -730.15
## AIC=1472.3   AICc=1473.28   BIC=1487.49
## 
## Training set error measures:
##                    ME     RMSE      MAE      MPE     MAPE      MASE        ACF1
## Training set 10.19519 573.1376 435.4128 48.93334 186.8701 0.6933641 -0.01895218
lmtest::coeftest((model3)) 
## 
## z test of coefficients:
## 
##      Estimate Std. Error z value  Pr(>|z|)    
## ar1 -0.732380   0.298484 -2.4537   0.01414 *  
## ar2  0.077404   0.121316  0.6380   0.52345    
## ma1 -1.233185   0.313548 -3.9330 8.389e-05 ***
## ma2 -0.480812   0.543986 -0.8839   0.37677    
## ma3  0.714304   0.287572  2.4839   0.01299 *  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Arima(1,2,0)

summary(model4)
## Series: data.dif1 
## ARIMA(1,2,0) 
## 
## Coefficients:
##           ar1
##       -0.7027
## s.e.   0.0722
## 
## sigma^2 = 1061823:  log likelihood = -777.01
## AIC=1558.02   AICc=1558.15   BIC=1563.08
## 
## Training set error measures:
##                     ME     RMSE      MAE      MPE     MAPE    MASE      ACF1
## Training set -5.023461 1014.047 760.1402 794.1505 1022.046 1.21047 -0.335012
lmtest::coeftest((model4))
## 
## z test of coefficients:
## 
##      Estimate Std. Error z value  Pr(>|z|)    
## ar1 -0.702697   0.072162 -9.7378 < 2.2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Arima(1,2,1)

summary(model5) 
## Series: data.dif1 
## ARIMA(1,2,1) 
## 
## Coefficients:
##           ar1      ma1
##       -0.5448  -1.0000
## s.e.   0.0856   0.0272
## 
## sigma^2 = 480242:  log likelihood = -742.14
## AIC=1490.29   AICc=1490.56   BIC=1497.89
## 
## Training set error measures:
##                    ME     RMSE      MAE       MPE     MAPE      MASE       ACF1
## Training set 7.674271 678.2486 505.3546 -132.1662 503.8812 0.8047415 -0.1775173
lmtest::coeftest((model5)) 
## 
## z test of coefficients:
## 
##      Estimate Std. Error  z value  Pr(>|z|)    
## ar1 -0.544830   0.085619  -6.3634 1.973e-10 ***
## ma1 -0.999999   0.027161 -36.8169 < 2.2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Comparison of Model Fit

modelaccuracy<-data.frame(
  "Model"=c("ARIMA(0,2,1)", "ARIMA(1,2,2)", "ARIMA(2,2,3)","ARIMA(1,2,0)", "ARIMA(1,2,1)"),
  "AIC"=c(model1$aic, model2$aic, model3$aic,model4$aic,model5$aic),
  "BIC"=c(model1$bic, model2$bic, model3$bic,model4$bic,model5$bic),
  "Signifikansi"=c("Signifikan","Tidak Signifikan","Tidak Signifikan"," Signifikan","Signifikan"))

modelaccuracy
##          Model      AIC      BIC     Signifikansi
## 1 ARIMA(0,2,1) 1521.345 1526.410       Signifikan
## 2 ARIMA(1,2,2) 1469.702 1479.832 Tidak Signifikan
## 3 ARIMA(2,2,3) 1472.299 1487.494 Tidak Signifikan
## 4 ARIMA(1,2,0) 1558.019 1563.084       Signifikan
## 5 ARIMA(1,2,1) 1490.289 1497.886       Signifikan

ARIMA(1,2,2) has the smallest AIC and BIC but includes a non-significant parameter; among the models whose parameters are all significant, ARIMA(1,2,1) has the lowest AIC and BIC, so it is selected as the best model.
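Using R's sign convention for the coefficients reported above, and writing $W_t$ for the series the model was fitted to (data.dif1), the selected ARIMA(1,2,1) can be written as

$$(1 - \hat\phi_1 B)(1 - B)^2 W_t = (1 + \hat\theta_1 B)\,\varepsilon_t, \qquad \hat\phi_1 = -0.5448,\ \hat\theta_1 = -1.0000 .$$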

Model Diagnostics

Overfitting

The overfitting check compares ARIMA(1,2,1) with ARIMA(2,1,1) and ARIMA(1,2,2).

model5a <- Arima(data.dif1, order=c(2,1,1), method="ML")
model5b <- Arima(data.dif1, order=c(1,2,2), method="ML")

Model Arima(1,2,1)

coeftest(model5)
## 
## z test of coefficients:
## 
##      Estimate Std. Error  z value  Pr(>|z|)    
## ar1 -0.544830   0.085619  -6.3634 1.973e-10 ***
## ma1 -0.999999   0.027161 -36.8169 < 2.2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Model Arima(2,1,1)

coeftest(model5a)
## 
## z test of coefficients:
## 
##        Estimate  Std. Error z value Pr(>|z|)    
## ar1 -0.00016939  0.10275762 -0.0016   0.9987    
## ar2  0.10732610  0.10239164  1.0482   0.2946    
## ma1 -0.99992084  0.20961543 -4.7703 1.84e-06 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Model Arima(1,2,2)

coeftest(model5b)
## 
## z test of coefficients:
## 
##      Estimate Std. Error  z value  Pr(>|z|)    
## ar1 -0.023606   0.120591  -0.1958    0.8448    
## ma1 -1.941303   0.120004 -16.1769 < 2.2e-16 ***
## ma2  0.941420   0.118314   7.9570 1.763e-15 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Diagnostic Tests

Model Diagnostics: Exploratory

sisaan5 <- model5$residuals

par(mfrow=c(2,2))
qqnorm(sisaan5)
box(col="black",lwd=2)
qqline(sisaan5, col = "red", lwd =1, col.lab="black",
       col.axis="black",col.sub = "black")
box(col="black",lwd=2)
plot(c(1:length(sisaan5)),sisaan5,col="black",col.lab="black",col.axis="black")
box(col="black",lwd=2)
acf(sisaan5,col="black",col.sub = "black",col.axis="black", col.lab="black")
box(col="black",lwd=2)
pacf(sisaan5,col="black",col.sub = "black",col.axis="black", col.lab="black",col.main="black")
box(col="black",lwd=2)

Model Diagnostics: Formal Tests

Normality of Residuals

shapiro.test(sisaan5)
## 
##  Shapiro-Wilk normality test
## 
## data:  sisaan5
## W = 0.95542, p-value = 0.002651

Independence of Residuals

Box.test(sisaan5, type = "Ljung")
## 
##  Box-Ljung test
## 
## data:  sisaan5
## X-squared = 3.0892, df = 1, p-value = 0.07881

Residual Mean Equal to Zero

t.test(sisaan5, mu = 0, conf.level = 0.95) 
## 
##  One Sample t-test
## 
## data:  sisaan5
## t = 0.10971, df = 94, p-value = 0.9129
## alternative hypothesis: true mean is not equal to 0
## 95 percent confidence interval:
##  -131.2161  146.5647
## sample estimates:
## mean of x 
##  7.674271
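As a compact alternative to the individual diagnostics above, forecast::checkresiduals() produces a residual time plot, the residual ACF, and a Ljung-Box test in one call; this is a sketch, not part of the original output.

# Combined residual diagnostics for the selected model
checkresiduals(model5)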

Forecasting

ramalan <- forecast(Arima(data.train, order=c(1,2,1),method="ML",include.drift = TRUE),h = length(data.test))
## Warning in Arima(data.train, order = c(1, 2, 1), method = "ML", include.drift =
## TRUE): No drift term fitted as the order of difference is 2 or more.
data.ramalan <- ramalan$mean
data.ramalan.ts <- ts(data.ramalan, start = 97)   # forecasts start at observation 97, matching the test set
plot(ramalan,col="black",col.sub ="black",col.axis="black",
     col.lab="black",col.main="black",lwd=2)
box(col="black",lwd=2)

ts.plot(data.train, ylab = "Data Kurs (Ribu)", col="black", lwd=2, main="Forecasting ARIMA(1,2,1)", gpars = list(col.main="black", col.axis="black", col.sub="black"))
lines(data.ramalan.ts, col = "blue",lwd=2)
lines(data.test, col = "red", lwd =2)
legend(61,16400,c("Data Training", "Data Testing","Data Forecast ARIMA"), 
       lwd=2, col=c("black","red","blue"), cex=0.8)
box(col="black",lwd=2)

perbandingan.temp<-matrix(data=c(data.test[1:10], data.ramalan[1:10]), nrow = 10, ncol = 2)
colnames(perbandingan.temp)<-c("Aktual","Hasil Forecast")
head(perbandingan.temp)
##       Aktual Hasil Forecast
## [1,] 18016.6       18438.24
## [2,] 18383.9       18463.42
## [3,] 20241.3       18488.59
## [4,] 18667.6       18513.76
## [5,] 17991.4       18538.92
## [6,] 17581.8       18564.09
error <- data.frame(data.test)-data.frame(data.ramalan[1:34]) 

## SSE (Sum Square Error)
SSE <- sum(error^2, na.rm = T)

## MSE (Mean Squared Error)
MSE<- sapply(error^2, mean, na.rm = T)

## RMSE (Root Mean Square Error)
RMSE <- sqrt(MSE)

## MAD (Mean Absolute Deviation)
MAD <- sapply(abs(error), mean, na.rm = T)

## MAPE (Mean Absolute Percentage Error)
r.error <- (error/data.frame(data.test))*100 # Relative Error
MAPE <- sapply(abs(r.error), mean, na.rm = T)

akurasifarima <- data.frame(
  "Ukuran Keakuratan" = c("SSE", "MSE", "MAPE", "RMSE", "MAD"), 
  "Forecasting" = c(SSE, MSE, MAPE, RMSE, MAD))
akurasifarima
##   Ukuran.Keakuratan  Forecasting
## 1               SSE 3.079050e+07
## 2               MSE 9.056031e+05
## 3              MAPE 4.106589e+00
## 4              RMSE 9.516318e+02
## 5               MAD 7.714150e+02
kable(akurasifarima)
Ukuran Keakuratan   Forecasting
SSE                 3.079050e+07
MSE                 9.056031e+05
MAPE                4.106589e+00
RMSE                9.516318e+02
MAD                 7.714150e+02
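The same test-set comparison can also be obtained in a single call with forecast::accuracy(), using the ramalan and data.test objects defined above; a sketch, with output not reproduced here.

# Test-set accuracy of the ARIMA forecasts against the actual values
accuracy(ramalan, data.test)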