MPDW P8

Library

library(tidyverse)
## ── Attaching core tidyverse packages ──────────────────────── tidyverse 2.0.0 ──
## ✔ dplyr     1.1.4     ✔ readr     2.1.5
## ✔ forcats   1.0.0     ✔ stringr   1.5.1
## ✔ ggplot2   3.5.1     ✔ tibble    3.2.1
## ✔ lubridate 1.9.3     ✔ tidyr     1.3.1
## ✔ purrr     1.0.2     
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ dplyr::filter() masks stats::filter()
## ✖ dplyr::lag()    masks stats::lag()
## ℹ Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors
library(forecast)
## Registered S3 method overwritten by 'quantmod':
##   method            from
##   as.zoo.data.frame zoo
library(TSA)
## Registered S3 methods overwritten by 'TSA':
##   method       from    
##   fitted.Arima forecast
##   plot.Arima   forecast
## 
## Attaching package: 'TSA'
## 
## The following object is masked from 'package:readr':
## 
##     spec
## 
## The following objects are masked from 'package:stats':
## 
##     acf, arima
## 
## The following object is masked from 'package:utils':
## 
##     tar
library(aTSA)
## 
## Attaching package: 'aTSA'
## 
## The following object is masked from 'package:forecast':
## 
##     forecast
## 
## The following object is masked from 'package:graphics':
## 
##     identify
library(car)
## Loading required package: carData
## 
## Attaching package: 'car'
## 
## The following object is masked from 'package:dplyr':
## 
##     recode
## 
## The following object is masked from 'package:purrr':
## 
##     some
library(lmtest)
## Loading required package: zoo
## 
## Attaching package: 'zoo'
## 
## The following objects are masked from 'package:base':
## 
##     as.Date, as.Date.numeric
library(datasets)

Input Data

This dataset comes from the datasets package and records the monthly total of international airline passengers over 12 years; each value is a passenger count in thousands.

data <- datasets::AirPassengers
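
A quick structural check of the series confirms this description (a minimal sketch using the base ts helpers frequency(), start(), end(), and length()):

frequency(data)          # 12 observations per year (monthly data)
start(data); end(data)   # January 1949 through December 1960
length(data)             # 144 observations in total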

Data Exploration

ts.plot(data, type="l", xlab = "Year", ylab="Passengers", col="red")
title(main = "Time Series Plot of Airline Passengers", cex.sub = 0.8)
points(data, pch = 20, col = "red")

dec <- decompose(data)
plot(dec)

Exploratory plots show an upward trend and a repeating (seasonal) pattern in the series. The seasonal pattern can be seen more clearly by displaying the series by year.

seasonplot(data,12,main="Seasonal Plot of Airline Passengers", ylab="Passengers", xlab="Month", year.labels = TRUE, col=rainbow(18))

The plot shows that passenger numbers are highest from June through September, and this behavior repeats from year to year.

monthplot(data,ylab="Passengers", col="red")

frame<-data.frame(values=as.matrix(data), date=lubridate::year(zoo::as.Date(data)))

library(ggplot2)
ggplot(frame,aes(y=values,x=date,group=date))+
  geom_boxplot()

Based on the plots above, the data follow nearly the same pattern from year to year, so the seasonal period can be taken as 12. The boxplots give a rough impression of homogeneity across years, although the spread appears to grow in later years. To check formally whether the variance is homogeneous, a Fligner-Killeen test (fligner.test) is applied.

Homogeneity Test

fligner.test(values ~ date, data=frame)
## 
##  Fligner-Killeen test of homogeneity of variances
## 
## data:  values by date
## Fligner-Killeen:med chi-squared = 22.635, df = 11, p-value = 0.01989

Based on the test above (p-value = 0.01989 < 0.05), the variance is not homogeneous, i.e., the series is not stationary in variance. The data are therefore transformed; here a log transformation (a Box-Cox transformation with lambda = 0) is applied.
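
The choice of a log transformation can be motivated by estimating the Box-Cox parameter directly; a minimal sketch using forecast::BoxCox.lambda() (Guerrero's method by default) is shown below. An estimated lambda close to zero supports working on the log scale.

lambda <- forecast::BoxCox.lambda(data)  # estimate the Box-Cox transformation parameter
lambda                                   # a value near 0 supports the log transform used here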

Handling Non-Stationarity in Variance

frame$values <- log(frame$values)
fligner.test(values ~ date, data=frame)
## 
##  Fligner-Killeen test of homogeneity of variances
## 
## data:  values by date
## Fligner-Killeen:med chi-squared = 2.3751, df = 11, p-value = 0.9967

After the log transformation, the variance is homogeneous (p-value = 0.9967), so the series can be regarded as stationary in variance. Next, the data are modeled with a SARIMA model.
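
Note that the transformation above was applied only to the data frame used for the homogeneity test; the modeling below proceeds with the original series. If the transformation were carried into the model instead, forecast::Arima() and auto.arima() accept a lambda argument (lambda = 0 corresponds to the log transform), as in this sketch (not used in the analysis below):

fit_log <- forecast::auto.arima(data, lambda = 0)  # fit on the log scale; output not shown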

Split Data

train <- subset(data,start=1,end=115)
test <- subset(data,start=116,end=144)
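
The split keeps the first 115 observations (about 80%) for training and the remaining 29 (about 20%) for testing; a quick sanity check:

length(train); length(test)   # 115 and 29 observations, roughly an 80/20 split
tsp(train); tsp(test)         # train runs through July 1958, test starts in August 1958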

Training Data Plot

autoplot(train) + theme_bw() + xlab("Year") + ylab("Passengers")

Test Data Plot

autoplot(test) + theme_bw() + xlab("Year") + ylab("Passengers")

Non-Seasonal ARIMA

Data Stationarity

acf0 <- acf(train,main="ACF",lag.max=48,xaxt="n", col="blue")
axis(1, at=0:48/12, labels=0:48)

acf0$lag <- acf0$lag * 12
acf0.1 <- as.data.frame(cbind(acf0$acf,acf0$lag))
acf0.2 <- acf0.1[which(acf0.1$V2%%12==0),]
barplot(height = acf0.2$V1, 
names.arg=acf0.2$V2, ylab="ACF", xlab="Lag")

Based on the earlier time series plot, the behavior of the series repeats every year; in other words, the series has a monthly seasonal pattern with period s = 12.
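
As a cross-check on the seasonal period, forecast::findfrequency() estimates the dominant period directly from the data (a sketch; it is expected to agree with the declared monthly frequency here):

frequency(train)                 # declared frequency of the ts object: 12
forecast::findfrequency(train)   # data-driven estimate of the dominant period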

tseries::adf.test(train)
## Warning in tseries::adf.test(train): p-value smaller than printed p-value
## 
##  Augmented Dickey-Fuller Test
## 
## data:  train
## Dickey-Fuller = -4.7724, Lag order = 4, p-value = 0.01
## alternative hypothesis: stationary

Based on the ADF test above (p-value = 0.01 < 0.05), the data are stationary in mean. Differencing is nonetheless carried out because the ACF plot does not show a clear cut-off at any particular lag.
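
The number of differences can also be checked with the automated helpers forecast::ndiffs() and forecast::nsdiffs() (a sketch; note that auto.arima() further below settles on d = 1 and D = 1, fewer than the d = 2 chosen manually in what follows):

forecast::ndiffs(train)    # suggested number of non-seasonal differences
forecast::nsdiffs(train)   # suggested number of seasonal differences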

Differencing

d1 <- diff(train)
ts.plot(d1, type="l", ylab="d1 Xt", col="blue")

Judging from the plot above, one non-seasonal difference (d = 1) has not yet removed the non-stationarity in mean of the non-seasonal component. A second difference is therefore applied.

d2 <- diff(d1)
ts.plot(d2, type="l", ylab="d2 Xt", col="blue")

Judging from the plot above, two non-seasonal differences (d = 2) have removed the non-stationarity in mean of the non-seasonal component. Next, SARIMA modeling will be carried out.

acf1 <- acf(d2,lag.max=48,xaxt="n", main="ACF d2", col="blue")
axis(1, at=0:48/12, labels=0:48)

The ACF plot of the d = 2 differenced series confirms that the non-seasonal component is stationary; however, note lags 12, 24, and so on: the seasonal component is not yet stationary. This indicates a seasonal pattern.

Seasonal ARIMA

D1 <- diff(train,12)
ts.plot(D1, type="l", ylab="D1 Xt", col="blue")

acf2<-acf(D1,lag.max=48,xaxt="n", main="ACF D1", col="blue")

acf2$lag <- acf2$lag * 12
acf2.1 <- as.data.frame(cbind(acf2$acf,acf2$lag))
acf2.2 <- acf2.1[which(acf2.1$V2%%12==0),]
barplot(height = acf2.2$V1, names.arg=acf2.2$V2, ylab="ACF", xlab="Lag")

Seasonal differencing at lag 12 (D = 1) removes the non-stationarity in mean of the seasonal component (but not of the non-seasonal component).

To remove the remaining non-seasonal trend as well, the seasonally differenced series is then differenced twice more (non-seasonally), giving a series with d = 2 and D = 1.
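
Because the difference operators commute, taking the seasonal difference first and then two regular differences yields the same series as any other ordering; a quick check (should return TRUE):

all.equal(diff(diff(diff(train, 12))),
          diff(diff(diff(train), 12)))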

d1D1 <- diff(D1)
d2D1 <- diff(d1D1)
ts.plot(d2D1, type="l", ylab="d2 D1 Xt", col="blue")

Model Identification

acf3 <- acf(d2D1,lag.max=48,xaxt="n", main="ACF d2D1", col="blue")
axis(1, at=0:48/12, labels=0:48)

acf3$lag <- acf3$lag * 12
acf3.1 <- as.data.frame(cbind(acf3$acf,acf3$lag))
acf3.2 <- acf3.1[which(acf3.1$V2%%12==0),]
barplot(height = acf3.2$V1, names.arg=acf3.2$V2, ylab="ACF", 
xlab="Lag")

Based on the sample ACF plot, lag 3 is significant, so the order q = 3 is chosen; lag 12 is the only significant seasonal lag, so Q = 1.

pacf3 <- pacf(d2D1,lag.max=48,xaxt="n", main="PACF d2D1", col="blue")
axis(1, at=0:48/12, labels=0:48)

pacf3$lag <- pacf3$lag * 12
pacf3.1 <- as.data.frame(cbind(pacf3$acf,pacf3$lag))
pacf3.2 <- pacf3.1[which(pacf3.1$V2%%12==0),]
barplot(height = pacf3.2$V1, names.arg=pacf3.2$V2, ylab="PACF", xlab="Lag")

The sample PACF plot cuts off at lag 4, so p = 4; lag 12 is the only significant seasonal lag, so P = 1.

TSA::eacf(d2D1)
## AR/MA
##   0 1 2 3 4 5 6 7 8 9 10 11 12 13
## 0 x x x o o o o o o o o  x  o  o 
## 1 x x o o o o o o o o o  o  o  o 
## 2 x o x o o o o o o o o  o  o  o 
## 3 x o x o o o o o o o o  o  o  o 
## 4 x x x o o o o o o o o  o  o  o 
## 5 o x o o o o o o o o o  o  o  o 
## 6 x o x x o o o o o o o  o  o  o 
## 7 x o x x o o o o o o o  o  o  o

Parameter Estimation

tmodel1 <- Arima(train,order=c(0,2,3),seasonal=c(1,1,1))
summary(tmodel1)
## Series: train 
## ARIMA(0,2,3)(1,1,1)[12] 
## 
## Coefficients:
##           ma1     ma2      ma3     sar1    sma1
##       -1.1307  0.2159  -0.0847  -0.2726  0.1354
## s.e.   0.1191  0.1276   0.1132   0.5358  0.5545
## 
## sigma^2 = 96.6:  log likelihood = -376.99
## AIC=765.98   AICc=766.88   BIC=781.67
## 
## Training set error measures:
##                      ME     RMSE      MAE        MPE     MAPE      MASE
## Training set -0.5380951 8.980136 6.858838 -0.3675756 2.865101 0.2331552
##                    ACF1
## Training set -0.0299136
lmtest::coeftest(tmodel1)
## 
## z test of coefficients:
## 
##       Estimate Std. Error z value Pr(>|z|)    
## ma1  -1.130748   0.119146 -9.4904   <2e-16 ***
## ma2   0.215922   0.127634  1.6917   0.0907 .  
## ma3  -0.084734   0.113162 -0.7488   0.4540    
## sar1 -0.272631   0.535818 -0.5088   0.6109    
## sma1  0.135366   0.554460  0.2441   0.8071    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
tmodel2 <- Arima(train,order=c(4,2,0),seasonal=c(1,1,1))
summary(tmodel2)
## Series: train 
## ARIMA(4,2,0)(1,1,1)[12] 
## 
## Coefficients:
##           ar1      ar2      ar3      ar4     sar1    sma1
##       -0.8969  -0.5287  -0.4883  -0.3115  -0.3260  0.2203
## s.e.   0.0989   0.1241   0.1251   0.0954   0.6826  0.7038
## 
## sigma^2 = 116.3:  log likelihood = -383.95
## AIC=781.9   AICc=783.11   BIC=800.21
## 
## Training set error measures:
##                      ME     RMSE      MAE        MPE    MAPE      MASE
## Training set 0.05600994 9.802315 7.271877 -0.0899852 3.06814 0.2471958
##                     ACF1
## Training set -0.06909698
lmtest::coeftest(tmodel2)
## 
## z test of coefficients:
## 
##       Estimate Std. Error z value  Pr(>|z|)    
## ar1  -0.896872   0.098873 -9.0710 < 2.2e-16 ***
## ar2  -0.528723   0.124119 -4.2598 2.046e-05 ***
## ar3  -0.488279   0.125066 -3.9042 9.455e-05 ***
## ar4  -0.311504   0.095357 -3.2667  0.001088 ** 
## sar1 -0.326020   0.682638 -0.4776  0.632943    
## sma1  0.220313   0.703755  0.3131  0.754240    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
tmodel3 <- Arima(train,order=c(0,2,1),seasonal=c(1,1,1))
summary(tmodel3)
## Series: train 
## ARIMA(0,2,1)(1,1,1)[12] 
## 
## Coefficients:
##           ma1     sar1     sma1
##       -0.9996  -0.1077  -0.0563
## s.e.   0.0288   0.3941   0.3925
## 
## sigma^2 = 97.54:  log likelihood = -378.54
## AIC=765.08   AICc=765.49   BIC=775.54
## 
## Training set error measures:
##                      ME     RMSE      MAE        MPE     MAPE      MASE
## Training set -0.5126478 9.117183 6.984594 -0.3532425 2.908872 0.2374301
##                    ACF1
## Training set -0.1719624
lmtest::coeftest(tmodel3)
## 
## z test of coefficients:
## 
##       Estimate Std. Error  z value Pr(>|z|)    
## ma1  -0.999588   0.028847 -34.6509   <2e-16 ***
## sar1 -0.107707   0.394093  -0.2733   0.7846    
## sma1 -0.056341   0.392504  -0.1435   0.8859    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
tmodel4 <- Arima(train,order=c(2,2,1),seasonal=c(1,1,1))
summary(tmodel4)
## Series: train 
## ARIMA(2,2,1)(1,1,1)[12] 
## 
## Coefficients:
##           ar1     ar2      ma1     sar1    sma1
##       -0.1624  0.0994  -0.9996  -0.3462  0.2187
## s.e.   0.1053  0.1004   0.0295   0.6286  0.6566
## 
## sigma^2 = 95.74:  log likelihood = -376.54
## AIC=765.09   AICc=765.98   BIC=780.78
## 
## Training set error measures:
##                      ME    RMSE      MAE        MPE     MAPE      MASE
## Training set -0.5502497 8.93985 6.806784 -0.3727131 2.846964 0.2313857
##                     ACF1
## Training set 0.002092598
lmtest::coeftest(tmodel4)
## 
## z test of coefficients:
## 
##       Estimate Std. Error  z value Pr(>|z|)    
## ar1  -0.162392   0.105296  -1.5422   0.1230    
## ar2   0.099429   0.100403   0.9903   0.3220    
## ma1  -0.999571   0.029487 -33.8990   <2e-16 ***
## sar1 -0.346249   0.628605  -0.5508   0.5818    
## sma1  0.218711   0.656643   0.3331   0.7391    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
tmodel5 <- Arima(train,order=c(3,2,0),seasonal=c(1,1,1))
summary(tmodel5)
## Series: train 
## ARIMA(3,2,0)(1,1,1)[12] 
## 
## Coefficients:
##           ar1      ar2      ar3     sar1    sma1
##       -0.8195  -0.3930  -0.2177  -0.3226  0.1798
## s.e.   0.0999   0.1218   0.0980   0.5165  0.5356
## 
## sigma^2 = 127.5:  log likelihood = -388.98
## AIC=789.95   AICc=790.84   BIC=805.64
## 
## Training set error measures:
##                      ME     RMSE      MAE         MPE     MAPE      MASE
## Training set 0.09891012 10.31806 7.712535 -0.04009503 3.249784 0.2621753
##                     ACF1
## Training set -0.07253388
lmtest::coeftest(tmodel5)
## 
## z test of coefficients:
## 
##       Estimate Std. Error z value  Pr(>|z|)    
## ar1  -0.819454   0.099919 -8.2011 2.381e-16 ***
## ar2  -0.393002   0.121805 -3.2265  0.001253 ** 
## ar3  -0.217717   0.098008 -2.2214  0.026323 *  
## sar1 -0.322584   0.516454 -0.6246  0.532225    
## sma1  0.179836   0.535581  0.3358  0.737039    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
AICKandidatModel <- c(tmodel1$aic, tmodel2$aic, tmodel3$aic,
                      tmodel4$aic, tmodel5$aic)
AICcKandidatModel <- c(tmodel1$aicc, tmodel2$aicc, tmodel3$aicc,
                       tmodel4$aicc, tmodel5$aicc)
BICKandidatModel <- c(tmodel1$bic, tmodel2$bic, tmodel3$bic,
                      tmodel4$bic, tmodel5$bic)
KandidatModelARIMA <- c("ARIMA(0,2,3)(1,1,1)12", "ARIMA(4,2,0)(1,1,1)12",
                        "ARIMA(0,2,1)(1,1,1)12", "ARIMA(2,2,1)(1,1,1)12",
                        "ARIMA(3,2,0)(1,1,1)12")
compmodelARIMA <- cbind(KandidatModelARIMA, AICKandidatModel,
                        AICcKandidatModel, BICKandidatModel)
colnames(compmodelARIMA) <- c("Kandidat Model", "Nilai AIC", 
                              "Nilai AICc", "Nilai BIC")
compmodelARIMA <- as.data.frame(compmodelARIMA)
compmodelARIMA
##          Kandidat Model        Nilai AIC       Nilai AICc        Nilai BIC
## 1 ARIMA(0,2,3)(1,1,1)12 765.982942163088 766.876559184364 781.673665264135
## 2 ARIMA(4,2,0)(1,1,1)12 781.901929678466 783.106230753735 800.207773296355
## 3 ARIMA(0,2,1)(1,1,1)12  765.07635946836 765.493026135027 775.536841535725
## 4 ARIMA(2,2,1)(1,1,1)12 765.087892069705 765.981509090981 780.778615170752
## 5 ARIMA(3,2,0)(1,1,1)12 789.950105081238 790.843722102515 805.640828182286

Based on AIC, AICc, parameter significance, and model parsimony, the best model among the candidates is ARIMA(0,2,1)×(1,1,1)12.
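
For reference, the selected ARIMA(0,2,1)(1,1,1)12 model can be written in backshift-operator form (using R's sign convention, in which the MA polynomials enter with plus signs):

$$(1 - \Phi_1 B^{12})(1 - B)^2(1 - B^{12})X_t = (1 + \theta_1 B)(1 + \Theta_1 B^{12})\varepsilon_t,$$

with estimates theta1 ≈ -1.00, Phi1 ≈ -0.11, and Theta1 ≈ -0.06 from the coefficient table above. For comparison, auto.arima() is also fitted to the training data below.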

model.auto.arima <- auto.arima(train)
summary(model.auto.arima)
## Series: train 
## ARIMA(1,1,0)(0,1,0)[12] 
## 
## Coefficients:
##           ar1
##       -0.2048
## s.e.   0.0974
## 
## sigma^2 = 93.62:  log likelihood = -375.75
## AIC=755.5   AICc=755.62   BIC=760.75
## 
## Training set error measures:
##                     ME     RMSE      MAE        MPE     MAPE      MASE
## Training set 0.2145268 9.067565 6.898655 0.03414791 2.899017 0.2345087
##                    ACF1
## Training set 0.01529513
lmtest::coeftest(model.auto.arima)
## 
## z test of coefficients:
## 
##      Estimate Std. Error z value Pr(>|z|)  
## ar1 -0.204775   0.097376 -2.1029  0.03547 *
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Model Diagnostics

tsdisplay(residuals(tmodel3), lag.max=45, 
          main='ARIMA(0,2,1)(1,1,1)12 Model Residuals', col="blue")

sisaan.model <- tmodel3$residuals
par(mfrow=c(2,2))
car::qqPlot(sisaan.model)
## [1] 42 51
plot(c(1:length(sisaan.model)),sisaan.model)
acf(sisaan.model)
pacf(sisaan.model)

par(mfrow = c(1,1))

Residuals Are Normally Distributed

shapiro.test(sisaan.model)
## 
##  Shapiro-Wilk normality test
## 
## data:  sisaan.model
## W = 0.98242, p-value = 0.1367

Residuals Are Independent

Box.test(sisaan.model, type = "Ljung") 
## 
##  Box-Ljung test
## 
## data:  sisaan.model
## X-squared = 3.4902, df = 1, p-value = 0.06173

Residuals Have Homogeneous Variance

Box.test((sisaan.model)^2, type = "Ljung") 
## 
##  Box-Ljung test
## 
## data:  (sisaan.model)^2
## X-squared = 2.9849, df = 1, p-value = 0.08405

Residual Mean Equals Zero

t.test(sisaan.model, mu = 0, conf.level = 0.95)
## 
##  One Sample t-test
## 
## data:  sisaan.model
## t = -0.60131, df = 114, p-value = 0.5488
## alternative hypothesis: true mean is not equal to 0
## 95 percent confidence interval:
##  -2.201546  1.176250
## sample estimates:
##  mean of x 
## -0.5126478
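
All four checks above fail to reject their null hypotheses at the 5% level, so the residuals can be regarded as normally distributed, mutually independent, homogeneous in variance, and centered at zero. As a cross-check, forecast::checkresiduals() bundles a residual time plot, ACF, histogram, and a Ljung-Box test into a single call (a sketch; output not shown):

forecast::checkresiduals(tmodel3)   # combined residual diagnostics for the chosen model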

Overfitting

Non-seasonal orders (p, q)

tmodel3.ofq <- Arima(train,order=c(0,2,1),seasonal=c(1,1,1))
summary(tmodel3.ofq)
## Series: train 
## ARIMA(0,2,1)(1,1,1)[12] 
## 
## Coefficients:
##           ma1     sar1     sma1
##       -0.9996  -0.1077  -0.0563
## s.e.   0.0288   0.3941   0.3925
## 
## sigma^2 = 97.54:  log likelihood = -378.54
## AIC=765.08   AICc=765.49   BIC=775.54
## 
## Training set error measures:
##                      ME     RMSE      MAE        MPE     MAPE      MASE
## Training set -0.5126478 9.117183 6.984594 -0.3532425 2.908872 0.2374301
##                    ACF1
## Training set -0.1719624
lmtest::coeftest(tmodel3.ofq)
## 
## z test of coefficients:
## 
##       Estimate Std. Error  z value Pr(>|z|)    
## ma1  -0.999588   0.028847 -34.6509   <2e-16 ***
## sar1 -0.107707   0.394093  -0.2733   0.7846    
## sma1 -0.056341   0.392504  -0.1435   0.8859    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Seasonal orders (P, Q)

tmodel3.ofPQ <- Arima(train,order=c(0,2,1),seasonal=c(1,1,1))
summary(tmodel3.ofPQ)
## Series: train 
## ARIMA(0,2,1)(1,1,1)[12] 
## 
## Coefficients:
##           ma1     sar1     sma1
##       -0.9996  -0.1077  -0.0563
## s.e.   0.0288   0.3941   0.3925
## 
## sigma^2 = 97.54:  log likelihood = -378.54
## AIC=765.08   AICc=765.49   BIC=775.54
## 
## Training set error measures:
##                      ME     RMSE      MAE        MPE     MAPE      MASE
## Training set -0.5126478 9.117183 6.984594 -0.3532425 2.908872 0.2374301
##                    ACF1
## Training set -0.1719624
lmtest::coeftest(tmodel3.ofPQ)
## 
## z test of coefficients:
## 
##       Estimate Std. Error  z value Pr(>|z|)    
## ma1  -0.999588   0.028847 -34.6509   <2e-16 ***
## sar1 -0.107707   0.394093  -0.2733   0.7846    
## sma1 -0.056341   0.392504  -0.1435   0.8859    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

The overfitted models that were tried do not yield better AIC values or parameter significance than the initial model. The initial model is therefore retained.

Forecasting

ramalan_sarima = forecast::forecast(tmodel3, 29)
ramalan_sarima
##          Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## Aug 1958       491.5540 478.8353 504.2727 472.1024 511.0056
## Sep 1958       431.0988 413.0213 449.1762 403.4517 458.7459
## Oct 1958       375.7902 353.5397 398.0407 341.7610 409.8194
## Nov 1958       335.1986 309.3793 361.0179 295.7114 374.6859
## Dec 1958       367.1753 338.1675 396.1831 322.8118 411.5389
## Jan 1959       372.2442 340.3142 404.1743 323.4114 421.0770
## Feb 1959       351.8931 317.2395 386.5466 298.8951 404.8911
## Mar 1959       397.8292 360.6073 435.0511 340.9033 454.7552
## Apr 1959       385.1264 345.4611 424.7916 324.4635 445.7892
## May 1959       399.0666 357.0611 441.0721 334.8247 463.3084
## Jun 1959       470.4115 426.1526 514.6704 402.7233 538.0996
## Jul 1959       524.5198 478.0816 570.9579 453.4987 595.5408
## Aug 1959       525.4872 472.8218 578.1525 444.9424 606.0319
## Sep 1959       465.0155 406.6907 523.3403 375.8154 554.2155
## Oct 1959       409.7823 346.2141 473.3505 312.5632 507.0014
## Nov 1959       369.2967 300.8055 437.7879 264.5485 474.0449
## Dec 1959       401.4258 328.2673 474.5844 289.5395 513.3122
## Jan 1960       406.6372 329.0209 484.2536 287.9333 525.3412
## Feb 1960       386.3661 304.4673 468.2650 261.1127 511.6196
## Mar 1960       432.3514 346.3192 518.3836 300.7765 563.9263
## Apr 1960       419.7664 329.7294 509.8034 282.0667 557.4661
## May 1960       434.0784 340.1488 528.0081 290.4254 577.7314
## Jun 1960       505.7515 408.0280 603.4751 356.2962 655.2068
## Jul 1960       560.3212 458.8914 661.7510 405.1977 715.4447
## Aug 1960       561.5017 453.0729 669.9305 395.6742 727.3292
## Sep 1960       501.2894 386.1843 616.3946 325.2513 677.3276
## Oct 1960       446.3058 324.7936 567.8179 260.4690 632.1425
## Nov 1960       406.0664 278.3761 533.7566 210.7810 601.3517
## Dec 1960       438.4367 304.7655 572.1080 234.0042 642.8693
autoplot(ramalan_sarima, col="blue")

accuracy(ramalan_sarima,test)
##                      ME      RMSE       MAE        MPE     MAPE      MASE
## Training set -0.5126478  9.117183  6.984594 -0.3532425 2.908872 0.2374301
## Test set      6.8004298 23.499139 18.479687  0.8653672 4.085490 0.6281874
##                    ACF1 Theil's U
## Training set -0.1719624        NA
## Test set      0.5730928 0.4484803

With a test-set MAPE of about 4%, the fitted model can be considered quite good at forecasting this series.
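
As a sanity check, the test-set MAPE reported by accuracy() can be recomputed directly from the point forecasts (a minimal sketch):

mean(abs((test - ramalan_sarima$mean) / test)) * 100   # about 4.1, matching the accuracy() output above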