This dataset contains daily weather data for the city of Delhi, India, from 1 January 2016 to 31 December 2016 (366 daily observations). It records four weather parameters: mean temperature, humidity, wind speed, and mean pressure. The variables used in this analysis are humidity (as the response, Yt) and mean temperature (as the predictor, Xt). The data were obtained from kaggle.com.
library(dLagM)
## Loading required package: nardl
## Registered S3 method overwritten by 'quantmod':
## method from
## as.zoo.data.frame zoo
## Loading required package: dynlm
## Loading required package: zoo
##
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
##
## as.Date, as.Date.numeric
library(dynlm)
library(MLmetrics)
##
## Attaching package: 'MLmetrics'
## The following object is masked from 'package:dLagM':
##
## MAPE
## The following object is masked from 'package:base':
##
## Recall
library(lmtest)
library(car)
## Loading required package: carData
data <- read.csv("C:/0 SEM5/MPDW/kuliah/Data Tugas 2.csv")
str(data)
## 'data.frame': 366 obs. of 5 variables:
## $ date : chr "2016-01-01" "2016-01-02" "2016-01-03" "2016-01-04" ...
## $ meantemp : num 14.7 14 14.4 15.8 15.8 ...
## $ humidity : num 72.3 75.9 74.8 77.1 88.8 ...
## $ wind_speed : num 1.057 2.087 5.113 0 0.617 ...
## $ meanpressure: num 1021 1021 1018 1018 1017 ...
colnames(data)[1] <- "date"
colnames(data)[3] <- "Yt"  # humidity becomes the response Yt
colnames(data)[2] <- "Xt"  # mean temperature becomes the predictor Xt
data$Xt <- as.numeric(data$Xt)
data$Yt <- as.numeric(data$Yt)
View(data)
Data splitting divides the data into two parts: the first part (the training set) is used to build the model, and the second part (the test set) is used to evaluate it. Here the first 292 observations form the training set and the next 73 observations form the test set.
train<-data[1:292,]
test<-data[293:365,]
# convert to time series objects
train.ts<-ts(train)
test.ts<-ts(test)
data.ts<-ts(data)
The Koyck method is based on the assumption that the further a lag of the independent variable is from the current period, the smaller its influence on the dependent variable (Aqibah et al. 2020).
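In equation form (a standard statement of the geometric-lag model, written here for reference rather than quoted from the dLagM documentation), the lag weights decline geometrically, and the Koyck transformation reduces the infinite lag to a regression with a single lag of Y:

$$
Y_t=\alpha+\beta_0\sum_{i=0}^{\infty}\phi^{\,i}X_{t-i}+\varepsilon_t,\qquad 0<\phi<1,
$$

$$
Y_t=\alpha(1-\phi)+\beta_0 X_t+\phi\,Y_{t-1}+\nu_t,\qquad \nu_t=\varepsilon_t-\phi\,\varepsilon_{t-1}.
$$

In the summary below, the coefficient of Y.1 estimates $\phi$ (about 0.909) and the coefficient of X.t estimates $\beta_0$ (about 0.015); the reported geometric $\alpha$ of about 52.63 multiplied by $(1-\phi)$ reproduces the regression intercept of roughly 4.77.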
model.koyck = dLagM::koyckDlm(x = train$Xt, y = train$Yt)
summary(model.koyck)
##
## Call:
## "Y ~ (Intercept) + Y.1 + X.t"
##
## Residuals:
## Min 1Q Median 3Q Max
## -22.0814 -4.2197 -0.8517 3.8078 20.2773
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 4.76581 3.43365 1.388 0.166
## Y.1 0.90944 0.02901 31.353 <2e-16 ***
## X.t 0.01508 0.07740 0.195 0.846
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 7.345 on 288 degrees of freedom
## Multiple R-Squared: 0.8219, Adjusted R-squared: 0.8207
## Wald test: 665.2 on 2 and 288 DF, p-value: < 2.2e-16
##
## Diagnostic tests:
## NULL
##
## alpha beta phi
## Geometric coefficients: 52.62673 0.01508206 0.9094413
AIC(model.koyck)
## [1] 1991.324
BIC(model.koyck)
## [1] 2006.018
AIC (Akaike's Information Criterion) and BIC (Bayesian Information Criterion) measure the goodness of a model while penalizing its complexity; the smaller the AIC and BIC values, the better the model.
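For reference, with $\hat{L}$ the maximized likelihood of the model, $k$ the number of estimated parameters, and $n$ the number of observations, the two criteria are:

$$
\mathrm{AIC}=2k-2\ln\hat{L},\qquad \mathrm{BIC}=k\ln n-2\ln\hat{L}.
$$

Because $\ln n > 2$ whenever $n > e^{2}\approx 7.4$, BIC penalizes extra parameters more heavily than AIC and therefore tends to favor more parsimonious models.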
(fore.koyck <- forecast(model = model.koyck, x=test$Xt, h=73))
## $forecasts
## [1] 49.27208 50.01441 50.67945 51.29820 51.83326 52.33496 52.78428 53.18593
## [9] 53.53580 53.83835 54.11797 54.35735 54.57072 54.76244 54.92689 55.10529
## [17] 55.25510 55.36570 55.46343 55.57674 55.66750 55.73902 55.81188 55.88420
## [25] 55.94422 56.01023 56.05593 56.08081 56.09634 56.10988 56.12219 56.11734
## [33] 56.13681 56.14379 56.16583 56.18959 56.19002 56.22327 56.26397 56.29108
## [41] 56.29790 56.29138 56.25919 56.22811 56.19167 56.18870 56.15118 56.11309
## [49] 56.07697 56.03976 55.98557 55.97424 55.91911 55.92323 55.92497 55.91360
## [57] 55.89580 55.87375 55.81850 55.79339 55.74919 55.73575 55.75282 55.74196
## [65] 55.72055 55.67491 55.66006 55.59651 55.58612 55.57226 55.56519 55.52891
## [73] 55.47867
##
## $call
## forecast.koyckDlm(model = model.koyck, x = test$Xt, h = 73)
##
## attr(,"class")
## [1] "forecast.koyckDlm" "dLagM"
mape.koyck <- MAPE(fore.koyck$forecasts, test$Yt)
mape_train <- dLagM::GoF(model.koyck)["MAPE"]
c("MAPE_testing" = mape.koyck, "MAPE_taining" = mape_train)
## $MAPE_testing
## [1] 0.1542487
##
## $MAPE_training.MAPE
## [1] 0.1044997
A lag is the time needed for the predictor variable X to affect the response variable Y. The distributed lag model below uses the current value of X together with its first two lags (q = 2).
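A finite distributed lag model of order $q$ regresses $Y_t$ on the current value of $X$ and its first $q$ lags, $Y_t=\alpha+\sum_{i=0}^{q}\beta_i X_{t-i}+\varepsilon_t$; with $q=2$, as fitted below, this is:

$$
Y_t=\alpha+\beta_0 X_t+\beta_1 X_{t-1}+\beta_2 X_{t-2}+\varepsilon_t.
$$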
model.dlm = dLagM::dlm(x = train$Xt,y = train$Yt, q=2)
summary(model.dlm)
##
## Call:
## lm(formula = model.formula, data = design)
##
## Residuals:
## Min 1Q Median 3Q Max
## -34.735 -10.396 1.032 10.844 31.908
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 95.5225 3.8105 25.068 < 2e-16 ***
## x.t -2.7795 0.5103 -5.446 1.11e-07 ***
## x.1 0.6573 0.6849 0.960 0.338
## x.2 0.8202 0.5061 1.621 0.106
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 14.61 on 286 degrees of freedom
## Multiple R-squared: 0.2974, Adjusted R-squared: 0.29
## F-statistic: 40.35 on 3 and 286 DF, p-value: < 2.2e-16
##
## AIC and BIC values for the model:
## AIC BIC
## 1 2384.458 2402.807
(fore.dlm <- forecast(model = model.dlm, x=test$Xt, h=73)) # forecast 73 periods ahead
## $forecasts
## [1] 57.31445 57.09269 59.39757 56.51348 61.66904 58.44252 58.87464 60.67966
## [9] 62.83689 64.66817 62.32517 64.41751 64.81010 64.23766 65.72772 59.85295
## [17] 62.86468 68.61480 67.34748 61.32746 64.50105 67.32515 64.73532 63.36081
## [25] 65.11053 63.08261 65.90876 68.98041 68.78249 67.67263 67.26121 70.18737
## [33] 65.08698 67.23266 65.17160 64.58497 69.50621 62.72890 61.07938 65.15115
## [41] 68.57305 69.60106 72.91796 71.41372 71.41222 65.39903 72.68547 73.54143
## [49] 71.74686 72.26805 75.75051 67.63135 76.43968 66.55164 66.84673 72.09805
## [57] 72.79894 72.85020 78.67460 72.18930 75.30861 70.81411 65.41782 73.01404
## [65] 75.58171 78.46838 71.78583 80.74740 71.28094 71.43910 72.80332 78.30613
## [73] 80.48306
##
## $call
## forecast.dlm(model = model.dlm, x = test$Xt, h = 73)
##
## attr(,"class")
## [1] "forecast.dlm" "dLagM"
mape.dlm <- MAPE(fore.dlm$forecasts,test$Yt)
mape_train <- GoF(model.dlm)["MAPE"]
c("MAPE_testing" = mape.dlm, "MAPE_training" = mape_train)
## $MAPE_testing
## [1] 0.1941691
##
## $MAPE_training.MAPE
## [1] 0.256812
The optimum lag is determined by searching for the lag order that minimizes the information criteria (AIC, BIC) and the error measures reported by finiteDLMauto.
finiteDLMauto(formula = Yt ~ Xt,
data = data.frame(train),q.min = 1,q.max = 150,
model.type = "dlm",error.type = "AIC", trace = TRUE)
## q - k MASE AIC BIC GMRAE MBRAE R.Adj.Sq Ljung-Box
## 145 145 0.00000 -Inf -Inf 0.00000 0.00342 NaN NaN
## 146 146 0.00000 -Inf -Inf 0.00000 0.00345 NaN NaN
## 147 147 0.00000 -Inf -Inf 0.00000 0.00347 NaN NaN
## 148 148 0.00000 -Inf -Inf 0.00000 0.00350 NaN NaN
## 149 149 0.00000 -Inf -Inf 0.00000 0.00352 NaN NaN
## 150 150 0.00000 -Inf -Inf 0.00000 0.00355 NaN NaN
## 144 144 0.03048 294.1069 734.6971 0.03861 -0.00769 0.97549 4.649712e-03
## 143 143 0.04182 381.2592 819.8354 0.05170 -0.05508 0.97761 1.165236e-06
## 142 142 0.07234 537.4842 974.0263 0.09094 -0.09405 0.95728 8.983555e-01
## 141 141 0.09922 635.0782 1069.5665 0.12306 -0.09742 0.93862 8.548037e-03
## 140 140 0.11611 686.1147 1118.5296 0.14665 -0.20443 0.93142 8.041761e-02
## 139 139 0.13542 737.8353 1168.1575 0.16518 0.20251 0.92060 1.051675e-03
## 138 138 0.18778 839.2410 1267.4514 0.23816 -0.09135 0.87324 1.758376e-04
## 137 137 0.20508 859.7359 1285.8154 0.26880 0.02275 0.87870 2.712973e-09
## 136 136 0.23199 901.1303 1325.0603 0.26453 -0.54482 0.86580 8.893515e-10
## 135 135 0.25378 934.4891 1356.2510 0.32990 1.07882 0.85540 1.096656e-11
## 134 134 0.26402 948.3115 1367.8870 0.34194 4.44792 0.86049 1.776357e-15
## 133 133 0.27177 958.4240 1375.7950 0.33993 -0.87108 0.86831 0.000000e+00
## 132 132 0.29514 989.1766 1404.3251 0.36786 0.00564 0.85486 3.552714e-15
## 131 131 0.30363 1012.0141 1424.9223 0.37085 0.70426 0.84572 0.000000e+00
## 130 130 0.30461 1024.5647 1435.2150 0.34726 1.65389 0.84697 0.000000e+00
## 129 129 0.32239 1037.4018 1445.7768 0.39500 -0.00210 0.84720 0.000000e+00
## 128 128 0.33952 1061.0424 1467.1249 0.38680 0.00893 0.83734 0.000000e+00
## 127 127 0.36285 1083.8586 1487.6315 0.45002 0.04051 0.82712 0.000000e+00
## 126 126 0.37238 1092.8224 1494.2688 0.46042 6.74952 0.83058 0.000000e+00
## 125 125 0.37787 1100.4377 1499.5410 0.45124 0.12189 0.83406 0.000000e+00
## 124 124 0.38971 1112.8889 1509.6324 0.42014 -0.36217 0.83437 0.000000e+00
## 123 123 0.42003 1127.9621 1522.3293 0.54267 5.82929 0.83208 0.000000e+00
## 122 122 0.43940 1147.1034 1539.0782 0.51275 35.98313 0.82883 0.000000e+00
## 121 121 0.44205 1151.5211 1541.0873 0.51937 -0.65987 0.83804 0.000000e+00
## 120 120 0.44204 1153.8850 1541.0268 0.51395 0.71401 0.84761 0.000000e+00
## 119 119 0.44656 1157.7763 1542.4779 0.54000 0.43485 0.85548 0.000000e+00
## 118 118 0.44695 1160.2435 1542.4892 0.55451 0.65551 0.86403 0.000000e+00
## 117 117 0.44598 1163.3660 1543.1403 0.52499 -1.19340 0.86996 0.000000e+00
## 116 116 0.44580 1166.7942 1544.0818 0.52167 -3.85405 0.87582 0.000000e+00
## 115 115 0.46140 1177.3220 1552.1076 0.55905 -0.19047 0.87672 0.000000e+00
## 114 114 0.46683 1183.6858 1555.9545 0.56603 -5.85616 0.88039 0.000000e+00
## 113 113 0.49243 1199.3086 1569.0454 0.64157 -0.61128 0.87762 0.000000e+00
## 112 112 0.49796 1211.4649 1578.6550 0.60886 44.36606 0.87666 0.000000e+00
## 111 111 0.50299 1216.6884 1581.3171 0.61590 0.17362 0.88019 0.000000e+00
## 110 110 0.52653 1228.0366 1590.0893 0.64973 1.01515 0.87931 0.000000e+00
## 109 109 0.54731 1242.6639 1602.1264 0.70317 -2.37477 0.87519 0.000000e+00
## 108 108 0.58419 1268.2922 1625.1501 0.71655 0.00442 0.86337 0.000000e+00
## 107 107 0.58929 1276.4063 1630.6455 0.74035 1.82570 0.86486 0.000000e+00
## 106 106 0.61114 1295.9999 1647.6063 0.75326 -0.52312 0.85772 0.000000e+00
## 105 105 0.61258 1300.6788 1649.6385 0.74220 0.40472 0.86140 0.000000e+00
## 104 104 0.62517 1305.7583 1652.0576 0.76928 1.27365 0.86479 0.000000e+00
## 103 103 0.62513 1309.1000 1652.7252 0.79915 0.08472 0.86864 0.000000e+00
## 102 102 0.63002 1314.6720 1655.6096 0.83519 -4.57399 0.86996 0.000000e+00
## 101 101 0.63317 1317.9779 1656.2143 0.82660 4.43526 0.87282 0.000000e+00
## 100 100 0.62961 1320.8135 1656.3355 0.81702 0.53742 0.87641 0.000000e+00
## 99 99 0.62787 1323.9574 1656.7518 0.80519 -77.20906 0.87928 0.000000e+00
## 98 98 0.62530 1327.5984 1657.6521 0.78267 -0.43474 0.88144 0.000000e+00
## 97 97 0.62640 1331.9620 1659.2619 0.79396 -121.52858 0.88300 0.000000e+00
## 96 96 0.62929 1337.8138 1662.3471 0.79602 4.80465 0.88389 0.000000e+00
## 95 95 0.63210 1343.3011 1665.0551 0.77592 0.40147 0.88526 0.000000e+00
## 94 94 0.65274 1361.3593 1680.3212 0.76358 -0.05344 0.87880 0.000000e+00
## 93 93 0.70871 1397.0605 1713.2178 0.87743 0.94227 0.85937 0.000000e+00
## 92 92 0.72793 1410.3923 1723.7324 0.89356 0.15646 0.85461 0.000000e+00
## 91 91 0.74467 1418.2795 1728.7902 0.94043 0.48540 0.85375 0.000000e+00
## 90 90 0.75714 1425.8463 1733.5152 0.97028 0.57925 0.85299 0.000000e+00
## 89 89 0.76141 1430.0069 1734.8218 0.98908 0.52174 0.85466 0.000000e+00
## 88 88 0.76015 1437.1256 1739.0745 0.94110 -0.18651 0.85378 0.000000e+00
## 87 87 0.76323 1443.5006 1742.5715 0.96872 -0.12931 0.85332 0.000000e+00
## 86 86 0.76929 1450.0419 1746.2228 0.95152 0.67235 0.85270 0.000000e+00
## 85 85 0.77610 1459.4574 1752.7367 0.95370 -0.00558 0.85013 0.000000e+00
## 84 84 0.78279 1472.0317 1762.3975 0.94318 -0.05075 0.84509 0.000000e+00
## 83 83 0.79356 1479.8303 1767.2711 1.02542 0.53781 0.84362 0.000000e+00
## 82 82 0.79397 1484.9664 1769.4706 1.02754 -1.03960 0.84428 0.000000e+00
## 81 81 0.79526 1490.1786 1771.7347 1.00512 -0.06365 0.84519 0.000000e+00
## 80 80 0.79599 1496.0124 1774.6091 0.99771 0.77231 0.84527 0.000000e+00
## 79 79 0.80889 1512.6123 1788.2382 1.01087 0.55894 0.83704 0.000000e+00
## 78 78 0.83128 1532.4148 1805.0588 1.00738 0.16127 0.82594 0.000000e+00
## 77 77 0.85073 1552.3831 1822.0342 1.01470 1.14197 0.81400 0.000000e+00
## 76 76 0.85126 1560.4786 1827.1256 1.01621 0.38501 0.81191 0.000000e+00
## 75 75 0.84946 1567.7307 1831.3627 0.99619 2.52999 0.81048 0.000000e+00
## 74 74 0.85434 1580.5003 1841.1064 0.93400 1.49922 0.80419 0.000000e+00
## 73 73 0.86395 1586.7011 1844.2705 0.97150 0.54925 0.80400 0.000000e+00
## 72 72 0.87166 1593.0407 1847.5628 0.99409 0.22458 0.80362 0.000000e+00
## 71 71 0.89073 1603.1922 1854.6562 1.03993 0.57468 0.79991 0.000000e+00
## 70 70 0.90489 1610.6128 1859.0083 1.05297 0.50859 0.79870 0.000000e+00
## 69 69 0.90485 1615.2519 1860.5683 1.04683 0.25379 0.79953 0.000000e+00
## 68 68 0.90690 1619.5763 1861.8032 1.08929 0.76322 0.80062 0.000000e+00
## 67 67 0.90974 1623.9825 1863.1096 1.12690 1.19663 0.80165 0.000000e+00
## 66 66 0.90809 1627.7122 1863.7291 1.12215 2.18259 0.80340 0.000000e+00
## 65 65 0.91299 1634.3445 1867.2411 1.13768 0.62367 0.80274 0.000000e+00
## 64 64 0.90972 1642.2115 1871.9776 1.11895 -0.23199 0.80062 0.000000e+00
## 63 63 0.90378 1647.0678 1873.6934 1.10091 0.36615 0.80144 0.000000e+00
## 62 62 0.89952 1651.3465 1874.8217 1.09440 0.17200 0.80241 0.000000e+00
## 61 61 0.90131 1658.0584 1878.3731 1.06919 -0.74973 0.80127 0.000000e+00
## 60 60 0.89963 1663.5855 1880.7300 1.00514 1.37272 0.80114 0.000000e+00
## 59 59 0.90115 1671.6556 1885.6200 1.01673 0.73843 0.79902 0.000000e+00
## 58 58 0.90206 1677.8601 1888.6347 0.95328 0.36625 0.79846 0.000000e+00
## 57 57 0.90855 1683.8795 1891.4547 1.04683 0.02487 0.79790 0.000000e+00
## 56 56 0.91935 1692.0424 1896.4084 1.04239 -2.31417 0.79545 0.000000e+00
## 55 55 0.93312 1699.8518 1900.9992 1.08411 1.86279 0.79329 0.000000e+00
## 54 54 0.94935 1709.9884 1907.9078 1.06687 0.63042 0.78908 0.000000e+00
## 53 53 0.96784 1724.7416 1919.4236 1.15386 0.03796 0.78064 0.000000e+00
## 52 52 0.99487 1741.5794 1933.0145 1.14478 0.65031 0.76992 0.000000e+00
## 51 51 1.01096 1753.2645 1941.4436 1.21012 0.38161 0.76393 0.000000e+00
## 50 50 1.00300 1757.2316 1942.1453 1.22402 0.45050 0.76636 0.000000e+00
## 49 49 1.00749 1762.5656 1944.2048 1.22971 0.43966 0.76712 0.000000e+00
## 48 48 1.01099 1770.1727 1948.5282 1.16423 1.36324 0.76635 0.000000e+00
## 47 47 1.02335 1794.2767 1969.3397 1.13097 1.17779 0.74780 0.000000e+00
## 46 46 1.08572 1821.7955 1993.5568 1.15208 0.49353 0.72409 0.000000e+00
## 45 45 1.14191 1843.9042 2012.3549 1.33178 -52.50597 0.70494 0.000000e+00
## 44 44 1.18517 1859.1673 2024.2985 1.35665 0.62574 0.69332 0.000000e+00
## 43 43 1.24663 1876.1839 2037.9868 1.53675 -0.27335 0.67908 0.000000e+00
## 42 42 1.28888 1891.5854 2050.0511 1.62048 0.98526 0.66627 0.000000e+00
## 41 41 1.32937 1909.2813 2064.4012 1.59524 0.25982 0.64983 0.000000e+00
## 40 40 1.35348 1920.3338 2072.0992 1.72459 0.33404 0.64269 0.000000e+00
## 39 39 1.38146 1933.0473 2081.4496 1.67273 0.81549 0.63332 0.000000e+00
## 38 38 1.41518 1947.2704 2092.3011 1.75379 0.13146 0.62149 0.000000e+00
## 37 37 1.42917 1961.5443 2103.1949 1.71373 1.44744 0.61102 0.000000e+00
## 36 36 1.47871 1979.8742 2118.1361 1.81162 2.16108 0.59246 0.000000e+00
## 35 35 1.54347 2003.5157 2138.3806 1.92073 40.83921 0.56443 0.000000e+00
## 34 34 1.56881 2021.7753 2153.2348 1.99316 0.32648 0.54298 0.000000e+00
## 33 33 1.58949 2037.0793 2165.1251 2.04016 0.27461 0.52675 0.000000e+00
## 32 32 1.60815 2054.8242 2179.4481 1.96143 -3.87658 0.50467 0.000000e+00
## 31 31 1.62199 2067.7192 2188.9129 1.89672 0.75557 0.49195 0.000000e+00
## 30 30 1.63041 2076.3088 2194.0642 2.01963 0.30703 0.49026 0.000000e+00
## 29 29 1.64572 2085.3999 2199.7089 2.03597 0.48562 0.48764 0.000000e+00
## 28 28 1.66556 2095.6995 2206.5539 2.14458 0.99918 0.48060 0.000000e+00
## 27 27 1.69820 2108.4930 2215.8849 2.14134 -3.86866 0.46826 0.000000e+00
## 26 26 1.71387 2117.1129 2221.0343 2.21425 0.62618 0.46414 0.000000e+00
## 25 25 1.71936 2125.2308 2225.6738 2.15756 0.70264 0.46375 0.000000e+00
## 24 24 1.74349 2136.3434 2233.3001 2.24791 0.74260 0.45599 0.000000e+00
## 23 23 1.76559 2146.7986 2240.2611 2.31296 38.51678 0.45329 0.000000e+00
## 22 22 1.77599 2158.3448 2248.3054 2.28919 0.82750 0.44463 0.000000e+00
## 21 21 1.79726 2170.5893 2257.0401 2.31503 0.60158 0.43694 0.000000e+00
## 20 20 1.81932 2183.8625 2266.7959 2.34683 1.14689 0.42372 0.000000e+00
## 19 19 1.82901 2197.8458 2277.2542 2.27625 0.70311 0.41237 0.000000e+00
## 18 18 1.85388 2210.2590 2286.1347 2.35880 3.41604 0.40616 0.000000e+00
## 17 17 1.87559 2224.0738 2296.4092 2.38807 1.07178 0.39339 0.000000e+00
## 16 16 1.90319 2240.4315 2309.2191 2.30862 0.75394 0.37370 0.000000e+00
## 15 15 1.94380 2260.2001 2325.4324 2.37663 -2.22961 0.34632 0.000000e+00
## 14 14 1.96271 2270.8102 2332.4798 2.40865 0.60510 0.34166 0.000000e+00
## 13 13 1.97898 2278.9873 2337.0867 2.52352 1.18533 0.34119 0.000000e+00
## 12 12 1.98503 2288.2371 2342.7589 2.45369 0.33997 0.33661 0.000000e+00
## 11 11 1.99701 2296.6578 2347.5948 2.55773 0.94609 0.33246 0.000000e+00
## 10 10 1.99553 2304.8365 2352.1813 2.51465 5.44293 0.33012 0.000000e+00
## 9 9 2.01293 2314.3506 2358.0960 2.53371 1.62912 0.32443 0.000000e+00
## 8 8 2.02381 2325.7254 2365.8641 2.38956 -0.88201 0.31240 0.000000e+00
## 7 7 2.03821 2336.5565 2373.0814 2.44771 0.72565 0.30652 0.000000e+00
## 6 6 2.07258 2349.1668 2382.0707 2.56925 1.32040 0.29791 0.000000e+00
## 5 5 2.08786 2359.2712 2388.5471 2.53130 1.86625 0.29300 0.000000e+00
## 4 4 2.09455 2367.9099 2393.5506 2.65759 0.47505 0.29488 0.000000e+00
## 3 3 2.06821 2375.5164 2397.5149 2.48524 0.69709 0.29444 0.000000e+00
## 2 2 2.08330 2384.4578 2402.8072 2.59019 4.99611 0.29002 0.000000e+00
## 1 1 2.08456 2392.3365 2407.0298 2.57200 0.94080 0.28850 0.000000e+00
Among the non-degenerate fits, the smallest MASE, AIC, and BIC are obtained at lag q = 144; the candidates with q >= 145 fit the training data exactly (AIC of -Inf and an undefined adjusted R-squared) because they leave no residual degrees of freedom, so q = 144 is taken as the optimum lag.
model.dlm2 = dLagM::dlm(x = train$Xt,y = train$Yt, q= 144)
summary(model.dlm2)
##
## Call:
## lm(formula = model.formula, data = design)
##
## Residuals:
## 1 2 3 4 5 6 7 8
## 0.546151 -0.079620 -0.871853 0.352067 -0.135916 -0.056101 0.345214 -0.222572
## 9 10 11 12 13 14 15 16
## -0.135679 0.075688 -0.073454 0.643695 -0.153160 -0.012459 -0.211338 -0.053666
## 17 18 19 20 21 22 23 24
## 0.185295 -0.060850 -0.313127 -0.015623 -0.026443 0.227919 0.298727 -0.520421
## 25 26 27 28 29 30 31 32
## 0.210131 0.295790 -0.011965 0.170087 -0.346776 -0.037997 -0.051678 0.001010
## 33 34 35 36 37 38 39 40
## 0.267056 -0.299658 -0.232971 0.060470 0.061712 0.186472 0.097091 -0.018278
## 41 42 43 44 45 46 47 48
## -0.137485 0.338809 -0.040784 -0.017954 -0.157533 0.054329 -0.124856 0.074702
## 49 50 51 52 53 54 55 56
## -0.164623 0.055279 0.051916 -0.364016 0.298686 -0.134447 -0.023270 0.317984
## 57 58 59 60 61 62 63 64
## 0.084113 0.208392 -0.279601 -0.148546 0.026321 -0.064540 0.196051 -0.093946
## 65 66 67 68 69 70 71 72
## -0.273720 0.009284 0.244013 -0.139656 0.262488 -0.296390 -0.013948 0.213222
## 73 74 75 76 77 78 79 80
## 0.121438 -0.028874 -0.368319 0.040952 0.307595 0.042113 -0.203388 0.295373
## 81 82 83 84 85 86 87 88
## -0.269867 -0.110288 0.473680 -0.185278 -0.145480 -0.242673 0.158660 0.168570
## 89 90 91 92 93 94 95 96
## -0.132925 0.204380 -0.283073 0.081981 0.306454 0.053740 -0.330785 -0.066150
## 97 98 99 100 101 102 103 104
## 0.186697 -0.445768 0.116062 0.239342 0.379446 -0.528547 0.325789 0.114519
## 105 106 107 108 109 110 111 112
## -0.449418 0.051465 0.013806 -0.205828 0.065825 0.088628 -0.127421 -0.040205
## 113 114 115 116 117 118 119 120
## 0.355178 0.190303 -0.075238 -0.063560 0.351182 -0.417889 -0.242732 0.233972
## 121 122 123 124 125 126 127 128
## -0.053180 -0.130660 0.207343 0.099792 -0.318099 -0.058651 0.150198 0.200829
## 129 130 131 132 133 134 135 136
## -0.040377 0.109165 -0.011722 -0.224993 0.371059 0.157638 -0.165196 -0.338647
## 137 138 139 140 141 142 143 144
## -0.036745 0.060832 -0.110171 -0.174037 -0.065540 -0.214000 0.256508 0.584765
## 145 146 147 148
## -0.135112 0.172839 0.286479 -0.627008
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -10.56312 41.72565 -0.253 0.8238
## x.t -2.11200 0.40504 -5.214 0.0349 *
## x.1 2.00914 0.51993 3.864 0.0609 .
## x.2 0.57928 0.49361 1.174 0.3614
## x.3 1.23142 0.47000 2.620 0.1200
## x.4 0.06545 0.42088 0.156 0.8907
## x.5 0.01909 0.44111 0.043 0.9694
## x.6 0.59262 0.41242 1.437 0.2873
## x.7 1.18455 0.38872 3.047 0.0929 .
## x.8 -2.24387 0.44946 -4.992 0.0379 *
## x.9 -0.56927 0.45030 -1.264 0.3335
## x.10 -0.86692 0.43163 -2.008 0.1824
## x.11 -0.22365 0.42506 -0.526 0.6513
## x.12 -0.78048 0.35540 -2.196 0.1592
## x.13 -0.10473 0.34993 -0.299 0.7930
## x.14 -0.54052 0.35215 -1.535 0.2646
## x.15 -0.96554 0.37344 -2.586 0.1227
## x.16 0.81729 0.35566 2.298 0.1484
## x.17 1.49151 0.38165 3.908 0.0597 .
## x.18 0.07448 0.34060 0.219 0.8472
## x.19 -0.68516 0.38149 -1.796 0.2143
## x.20 0.69374 0.36769 1.887 0.1998
## x.21 -0.15321 0.35104 -0.436 0.7051
## x.22 -0.64397 0.36565 -1.761 0.2203
## x.23 0.43341 0.36577 1.185 0.3578
## x.24 -0.36751 0.33441 -1.099 0.3864
## x.25 -0.67403 0.32881 -2.050 0.1769
## x.26 -1.01064 0.34864 -2.899 0.1013
## x.27 -0.20362 0.35316 -0.577 0.6225
## x.28 -1.10920 0.35849 -3.094 0.0905 .
## x.29 -0.92645 0.31963 -2.899 0.1013
## x.30 0.48640 0.37342 1.303 0.3225
## x.31 -0.09930 0.38667 -0.257 0.8213
## x.32 -0.06281 0.33574 -0.187 0.8689
## x.33 0.49033 0.32104 1.527 0.2662
## x.34 0.94102 0.34451 2.731 0.1120
## x.35 -0.43709 0.43276 -1.010 0.4188
## x.36 0.07382 0.44374 0.166 0.8832
## x.37 0.99624 0.45045 2.212 0.1575
## x.38 -0.19579 0.45275 -0.432 0.7076
## x.39 -0.34164 0.47749 -0.715 0.5486
## x.40 -0.47539 0.50800 -0.936 0.4482
## x.41 0.80823 0.51822 1.560 0.2592
## x.42 -0.77278 0.52937 -1.460 0.2818
## x.43 0.17123 0.46915 0.365 0.7501
## x.44 0.71282 0.41860 1.703 0.2307
## x.45 0.14263 0.39922 0.357 0.7551
## x.46 -0.20398 0.34728 -0.587 0.6164
## x.47 1.06767 0.34238 3.118 0.0893 .
## x.48 0.61321 0.34913 1.756 0.2211
## x.49 -1.40478 0.48793 -2.879 0.1024
## x.50 1.07587 0.54961 1.958 0.1894
## x.51 -0.22540 0.49816 -0.452 0.6953
## x.52 1.17060 0.48889 2.394 0.1390
## x.53 0.32559 0.48478 0.672 0.5710
## x.54 -0.10929 0.47687 -0.229 0.8400
## x.55 0.27070 0.46169 0.586 0.6170
## x.56 -0.63363 0.45807 -1.383 0.3008
## x.57 1.92199 0.50168 3.831 0.0619 .
## x.58 0.08063 0.48116 0.168 0.8823
## x.59 -1.41136 0.47106 -2.996 0.0957 .
## x.60 -0.43965 0.44794 -0.982 0.4298
## x.61 0.11344 0.48083 0.236 0.8354
## x.62 0.31978 0.48838 0.655 0.5799
## x.63 -0.42155 0.46614 -0.904 0.4613
## x.64 0.17469 0.46699 0.374 0.7443
## x.65 -1.43609 0.44027 -3.262 0.0825 .
## x.66 0.33564 0.42051 0.798 0.5085
## x.67 1.12776 0.45006 2.506 0.1291
## x.68 0.70287 0.44853 1.567 0.2576
## x.69 -0.53393 0.39326 -1.358 0.3074
## x.70 0.05196 0.41138 0.126 0.9110
## x.71 0.40017 0.38843 1.030 0.4112
## x.72 0.83719 0.38070 2.199 0.1589
## x.73 0.96319 0.37047 2.600 0.1215
## x.74 -0.20041 0.33510 -0.598 0.6105
## x.75 0.37144 0.33393 1.112 0.3818
## x.76 0.07858 0.31154 0.252 0.8244
## x.77 1.53432 0.34581 4.437 0.0472 *
## x.78 1.21122 0.35580 3.404 0.0765 .
## x.79 -0.74553 0.41549 -1.794 0.2146
## x.80 0.41662 0.42491 0.980 0.4302
## x.81 -0.86401 0.36441 -2.371 0.1412
## x.82 0.40825 0.35372 1.154 0.3677
## x.83 -0.05796 0.35722 -0.162 0.8860
## x.84 0.25475 0.36238 0.703 0.5549
## x.85 -0.63014 0.38514 -1.636 0.2434
## x.86 -0.74759 0.40398 -1.851 0.2054
## x.87 0.80503 0.37020 2.175 0.1617
## x.88 0.72453 0.39145 1.851 0.2054
## x.89 0.82978 0.33961 2.443 0.1345
## x.90 -0.11936 0.33154 -0.360 0.7533
## x.91 0.37143 0.34575 1.074 0.3951
## x.92 0.06479 0.34454 0.188 0.8682
## x.93 -0.32275 0.36838 -0.876 0.4734
## x.94 0.31294 0.39693 0.788 0.5131
## x.95 -1.21638 0.42362 -2.871 0.1029
## x.96 0.03223 0.40673 0.079 0.9440
## x.97 -0.91908 0.40973 -2.243 0.1541
## x.98 0.36099 0.36377 0.992 0.4256
## x.99 -1.09005 0.36665 -2.973 0.0970 .
## x.100 -0.94914 0.34119 -2.782 0.1086
## x.101 -0.85282 0.35527 -2.401 0.1384
## x.102 0.82965 0.36380 2.281 0.1501
## x.103 -0.22725 0.36527 -0.622 0.5973
## x.104 0.31999 0.34784 0.920 0.4547
## x.105 0.35691 0.36375 0.981 0.4300
## x.106 -0.07541 0.37506 -0.201 0.8592
## x.107 1.04124 0.40422 2.576 0.1234
## x.108 1.22284 0.41263 2.964 0.0975 .
## x.109 0.32528 0.39406 0.825 0.4959
## x.110 -1.26833 0.46095 -2.752 0.1106
## x.111 -0.88249 0.46193 -1.910 0.1963
## x.112 0.61551 0.39874 1.544 0.2627
## x.113 -0.65069 0.37789 -1.722 0.2272
## x.114 -0.69886 0.34281 -2.039 0.1783
## x.115 -1.13534 0.32560 -3.487 0.0733 .
## x.116 -1.57696 0.35875 -4.396 0.0481 *
## x.117 -0.49498 0.35708 -1.386 0.3000
## x.118 0.20878 0.35894 0.582 0.6196
## x.119 0.69434 0.35453 1.958 0.1893
## x.120 -0.98799 0.36039 -2.741 0.1113
## x.121 -0.28244 0.37734 -0.749 0.5322
## x.122 0.68594 0.34352 1.997 0.1839
## x.123 0.55573 0.34279 1.621 0.2464
## x.124 0.73321 0.29649 2.473 0.1319
## x.125 -1.19172 0.35138 -3.392 0.0770 .
## x.126 -0.20748 0.34445 -0.602 0.6081
## x.127 -0.37241 0.31421 -1.185 0.3577
## x.128 0.64210 0.33468 1.919 0.1951
## x.129 0.29211 0.33906 0.862 0.4797
## x.130 -0.53329 0.33099 -1.611 0.2484
## x.131 -1.17812 0.34147 -3.450 0.0747 .
## x.132 -0.14416 0.35155 -0.410 0.7215
## x.133 1.79342 0.44927 3.992 0.0574 .
## x.134 0.33113 0.41870 0.791 0.5119
## x.135 0.21940 0.38183 0.575 0.6236
## x.136 -0.46663 0.43068 -1.083 0.3918
## x.137 1.16626 0.49084 2.376 0.1407
## x.138 0.49175 0.51923 0.947 0.4436
## x.139 1.29882 0.51787 2.508 0.1289
## x.140 0.66217 0.52774 1.255 0.3363
## x.141 -0.66622 0.50234 -1.326 0.3159
## x.142 -1.32841 0.54420 -2.441 0.1347
## x.143 0.59161 0.53741 1.101 0.3857
## x.144 0.53200 0.45532 1.168 0.3631
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 2.082 on 2 degrees of freedom
## Multiple R-squared: 0.9997, Adjusted R-squared: 0.9755
## F-statistic: 41.35 on 145 and 2 DF, p-value: 0.02389
##
## AIC and BIC values for the model:
## AIC BIC
## 1 294.1069 734.6971
(fore.dlm2 <- forecast(model = model.dlm2, x=test$Xt, h=73))
## $forecasts
## [1] 109.78516 91.78140 91.25585 55.35738 66.41172 77.73669 82.15412
## [8] 86.37902 57.53668 30.17052 50.78654 79.50810 83.80551 63.35540
## [15] 62.62922 81.55078 117.56920 129.05463 107.76862 75.99981 76.20548
## [22] 106.90993 108.98336 81.46696 46.67083 52.41896 72.34086 77.67981
## [29] 83.81451 75.83047 71.50438 99.20010 129.93702 110.87362 87.37850
## [36] 99.74698 108.37691 99.37759 75.45467 77.51992 58.34556 86.24903
## [43] 98.86877 79.17583 87.82749 76.98318 104.15242 111.37539 104.78991
## [50] 91.56591 87.71378 83.68193 96.39103 58.63722 74.86917 81.66463
## [57] 86.51047 95.75749 92.05470 67.31057 97.98791 82.93641 90.58239
## [64] 72.10880 58.41265 64.20732 65.23807 85.47745 73.96511 65.77097
## [71] 59.08839 77.77828 88.93250
##
## $call
## forecast.dlm(model = model.dlm2, x = test$Xt, h = 73)
##
## attr(,"class")
## [1] "forecast.dlm" "dLagM"
mape.dlm2 <- MAPE(fore.dlm2$forecasts, test$Yt)
mape_train <- GoF(model.dlm2)["MAPE"]
c("MAPE_testing" = mape.dlm2,"MAPE_training" = mape_train)
## $MAPE_testing
## [1] 0.490863
##
## $MAPE_training.MAPE
## [1] 0.003179645
The Autoregressive Distributed Lag (ADL, also written ARDL) model is a regression model that includes both the current and past (lagged) values of the independent variable as explanatory variables, and in addition includes lagged values of the dependent variable among the regressors (Chilin et al. 2019).
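In general form, an ARDL($p$, $q$) model (using the convention of the ardlDlm call below, where $p$ is the number of lags of $X$ and $q$ the number of lags of $Y$, consistent with the X.t, X.1, and Y.1 terms in the summary) can be written as:

$$
Y_t=\alpha+\sum_{i=0}^{p}\beta_i X_{t-i}+\sum_{j=1}^{q}\gamma_j Y_{t-j}+\varepsilon_t,
$$

so for $p=q=1$:

$$
Y_t=\alpha+\beta_0 X_t+\beta_1 X_{t-1}+\gamma_1 Y_{t-1}+\varepsilon_t.
$$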
model.ardl = ardlDlm(x = train$Xt, y = train$Yt, p =1, q =1)
summary(model.ardl)
##
## Time series regression with "ts" data:
## Start = 2, End = 292
##
## Call:
## dynlm(formula = as.formula(model.text), data = data, start = 1)
##
## Residuals:
## Min 1Q Median 3Q Max
## -19.4737 -2.4665 0.0533 3.1498 18.2177
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 6.82609 2.37894 2.869 0.00442 **
## X.t -3.08676 0.17955 -17.192 < 2e-16 ***
## X.1 3.00224 0.18155 16.536 < 2e-16 ***
## Y.1 0.92536 0.02051 45.117 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 5.152 on 287 degrees of freedom
## Multiple R-squared: 0.9127, Adjusted R-squared: 0.9118
## F-statistic: 1000 on 3 and 287 DF, p-value: < 2.2e-16
AIC(model.ardl)
## [1] 1785.87
BIC(model.ardl)
## [1] 1804.237
(fore.ardl <- forecast(model = model.ardl, x=test$Xt,h=73))
## $forecasts
## [1] 47.94369 48.28010 51.10616 48.86337 55.22185 52.51485 54.43205 56.25452
## [9] 59.70341 63.02956 62.07941 65.14169 65.89483 66.20312 68.05558 61.89212
## [17] 64.47835 69.63986 69.89882 64.57055 67.01695 69.09057 67.21492 65.79672
## [25] 66.86744 64.37351 67.27380 70.51948 71.65511 71.41133 71.07028 74.03931
## [33] 68.70466 70.71810 67.23332 66.36957 70.64928 63.66539 61.60196 63.80678
## [41] 67.52028 70.01197 75.16122 75.06665 76.29216 69.62646 76.58807 76.91365
## [49] 76.72563 77.14813 80.84020 72.43530 81.38795 69.64525 69.98802 72.55898
## [57] 73.88508 74.82816 81.73576 75.97791 80.02650 74.04439 67.83707 73.32783
## [65] 75.51400 80.60015 74.63767 84.67119 74.29255 75.02819 73.69565 79.67518
## [73] 82.79379
##
## $call
## forecast.ardlDlm(model = model.ardl, x = test$Xt, h = 73)
##
## attr(,"class")
## [1] "forecast.ardlDlm" "dLagM"
mape.ardl <- MAPE(fore.ardl$forecasts, test$Yt)
mape_train <- GoF(model.ardl)["MAPE"]
c("MAPE_testing" = mape.ardl, "MAPE_training" = mape_train)
## $MAPE_testing
## [1] 0.1997476
##
## $MAPE_training.MAPE
## [1] 0.07410837
ardlBoundOrders(data = data.frame(data), formula = Yt ~ Xt)
## $p
## Xt
## 1 15
##
## $q
## [1] 5
##
## $Stat.table
## q = 1 q = 2 q = 3 q = 4 q = 5 q = 6 q = 7 q = 8
## p = 1 2248.735 2245.339 2229.620 2214.967 2207.268 2202.682 2197.356 2186.308
## p = 2 2239.771 2232.156 2218.370 2204.207 2195.852 2190.264 2184.876 2174.541
## p = 3 2225.696 2225.696 2218.912 2205.065 2196.910 2191.117 2185.962 2175.542
## p = 4 2208.495 2210.472 2210.472 2204.183 2196.646 2190.659 2185.598 2175.865
## p = 5 2197.030 2198.783 2200.776 2200.776 2189.747 2183.262 2178.584 2169.386
## p = 6 2178.731 2179.654 2181.512 2183.008 2183.008 2184.989 2180.410 2171.275
## p = 7 2185.818 2184.026 2183.707 2184.561 2180.879 2180.879 2181.359 2172.496
## p = 8 2183.549 2179.484 2176.259 2175.887 2171.017 2172.523 2172.523 2173.246
## p = 9 2175.558 2172.524 2167.667 2167.157 2160.183 2161.671 2163.540 2163.540
## p = 10 2170.456 2168.294 2163.688 2162.427 2154.521 2156.195 2158.195 2160.186
## p = 11 2169.918 2167.207 2163.345 2162.041 2152.335 2154.225 2155.946 2157.354
## p = 12 2166.276 2163.034 2158.522 2158.287 2148.941 2150.929 2152.485 2152.981
## p = 13 2167.803 2163.613 2157.698 2156.998 2145.684 2147.666 2148.888 2148.334
## p = 14 2163.416 2159.213 2153.708 2153.164 2142.451 2144.446 2145.745 2145.196
## p = 15 2145.927 2142.289 2136.031 2135.871 2126.544 2128.490 2129.931 2129.908
## q = 9 q = 10 q = 11 q = 12 q = 13 q = 14 q = 15
## p = 1 2176.082 2172.715 2169.627 2165.566 2160.642 2157.274 2149.986
## p = 2 2165.594 2161.763 2158.630 2154.430 2149.893 2146.778 2139.634
## p = 3 2166.404 2162.547 2159.476 2155.034 2150.555 2147.460 2140.183
## p = 4 2166.750 2162.910 2159.881 2155.739 2151.430 2148.348 2140.995
## p = 5 2158.631 2154.677 2151.617 2147.626 2142.672 2139.647 2131.936
## p = 6 2160.358 2156.461 2153.400 2149.398 2144.461 2141.446 2133.720
## p = 7 2161.534 2157.622 2154.489 2150.535 2145.588 2142.598 2134.858
## p = 8 2162.308 2158.477 2155.270 2151.072 2146.033 2143.027 2134.939
## p = 9 2164.297 2160.445 2157.240 2153.052 2147.982 2144.977 2136.840
## p = 10 2160.186 2162.180 2158.964 2154.822 2149.763 2146.771 2138.603
## p = 11 2158.885 2158.885 2160.877 2156.771 2151.685 2148.700 2140.441
## p = 12 2154.117 2156.107 2156.107 2156.891 2151.412 2148.418 2139.653
## p = 13 2148.840 2150.669 2151.845 2151.845 2153.408 2150.411 2141.652
## p = 14 2145.840 2147.811 2149.704 2150.382 2150.382 2152.020 2143.035
## p = 15 2130.413 2132.347 2134.338 2134.556 2136.274 2136.274 2138.264
##
## $min.Stat
## [1] 2126.544
# equivalent to a DLM with one lag of Xt
cons_lm1 <- dynlm(Yt ~ Xt+L(Xt),data = train.ts)
# equivalent to an ARDL model with p = 0, q = 1
cons_lm2 <- dynlm(Yt ~ Xt+L(Yt),data = train.ts)
# equivalent to the ARDL model with p = 1, q = 1
cons_lm3 <- dynlm(Yt ~ Xt+L(Xt)+L(Yt),data = train.ts)
# equivalent to the DLM with two lags of Xt (q = 2)
cons_lm4 <- dynlm(Yt ~ Xt+L(Xt)+L(Xt,2),data = train.ts)
summary(cons_lm1)
##
## Time series regression with "ts" data:
## Start = 2, End = 292
##
## Call:
## dynlm(formula = Yt ~ Xt + L(Xt), data = train.ts)
##
## Residuals:
## Min 1Q Median 3Q Max
## -33.921 -9.995 0.708 10.955 32.305
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 95.9128 3.7680 25.455 < 2e-16 ***
## Xt -2.7229 0.5094 -5.346 1.84e-07 ***
## L(Xt) 1.4055 0.5057 2.779 0.00581 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 14.63 on 288 degrees of freedom
## Multiple R-squared: 0.2934, Adjusted R-squared: 0.2885
## F-statistic: 59.79 on 2 and 288 DF, p-value: < 2.2e-16
summary(cons_lm2)
##
## Time series regression with "ts" data:
## Start = 2, End = 292
##
## Call:
## dynlm(formula = Yt ~ Xt + L(Yt), data = train.ts)
##
## Residuals:
## Min 1Q Median 3Q Max
## -20.1938 -4.3465 -0.4302 3.5070 20.6395
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 15.11531 3.24410 4.659 4.85e-06 ***
## Xt -0.24475 0.07249 -3.376 0.000836 ***
## L(Yt) 0.85924 0.02806 30.619 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 7.186 on 288 degrees of freedom
## Multiple R-squared: 0.8295, Adjusted R-squared: 0.8283
## F-statistic: 700.5 on 2 and 288 DF, p-value: < 2.2e-16
summary(cons_lm3)
##
## Time series regression with "ts" data:
## Start = 2, End = 292
##
## Call:
## dynlm(formula = Yt ~ Xt + L(Xt) + L(Yt), data = train.ts)
##
## Residuals:
## Min 1Q Median 3Q Max
## -19.4737 -2.4665 0.0533 3.1498 18.2177
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 6.82609 2.37894 2.869 0.00442 **
## Xt -3.08676 0.17955 -17.192 < 2e-16 ***
## L(Xt) 3.00224 0.18155 16.536 < 2e-16 ***
## L(Yt) 0.92536 0.02051 45.117 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 5.152 on 287 degrees of freedom
## Multiple R-squared: 0.9127, Adjusted R-squared: 0.9118
## F-statistic: 1000 on 3 and 287 DF, p-value: < 2.2e-16
summary(cons_lm4)
##
## Time series regression with "ts" data:
## Start = 3, End = 292
##
## Call:
## dynlm(formula = Yt ~ Xt + L(Xt) + L(Xt, 2), data = train.ts)
##
## Residuals:
## Min 1Q Median 3Q Max
## -34.735 -10.396 1.032 10.844 31.908
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 95.5225 3.8105 25.068 < 2e-16 ***
## Xt -2.7795 0.5103 -5.446 1.11e-07 ***
## L(Xt) 0.6573 0.6849 0.960 0.338
## L(Xt, 2) 0.8202 0.5061 1.621 0.106
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 14.61 on 286 degrees of freedom
## Multiple R-squared: 0.2974, Adjusted R-squared: 0.29
## F-statistic: 40.35 on 3 and 286 DF, p-value: < 2.2e-16
deviance(cons_lm1)
## [1] 61638.52
deviance(cons_lm2)
## [1] 14873.74
deviance(cons_lm3)
## [1] 7616.657
deviance(cons_lm4)
## [1] 61070.82
if(require("lmtest"))encomptest(cons_lm1, cons_lm2)
## Encompassing test
##
## Model 1: Yt ~ Xt + L(Xt)
## Model 2: Yt ~ Xt + L(Yt)
## Model E: Yt ~ Xt + L(Xt) + L(Yt)
## Res.Df Df F Pr(>F)
## M1 vs. ME 287 -1 2035.57 < 2.2e-16 ***
## M2 vs. ME 287 -1 273.45 < 2.2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
dwtest(cons_lm1)
##
## Durbin-Watson test
##
## data: cons_lm1
## DW = 0.13556, p-value < 2.2e-16
## alternative hypothesis: true autocorrelation is greater than 0
dwtest(cons_lm2)
##
## Durbin-Watson test
##
## data: cons_lm2
## DW = 2.0681, p-value = 0.6823
## alternative hypothesis: true autocorrelation is greater than 0
dwtest(cons_lm3)
##
## Durbin-Watson test
##
## data: cons_lm3
## DW = 1.8372, p-value = 0.06655
## alternative hypothesis: true autocorrelation is greater than 0
dwtest(cons_lm4)
##
## Durbin-Watson test
##
## data: cons_lm4
## DW = 0.1396, p-value < 2.2e-16
## alternative hypothesis: true autocorrelation is greater than 0
bptest(cons_lm1)
##
## studentized Breusch-Pagan test
##
## data: cons_lm1
## BP = 15.981, df = 2, p-value = 0.0003386
bptest(cons_lm2)
##
## studentized Breusch-Pagan test
##
## data: cons_lm2
## BP = 4.6984, df = 2, p-value = 0.09544
bptest(cons_lm3)
##
## studentized Breusch-Pagan test
##
## data: cons_lm3
## BP = 19.402, df = 3, p-value = 0.0002257
bptest(cons_lm4)
##
## studentized Breusch-Pagan test
##
## data: cons_lm4
## BP = 17.687, df = 3, p-value = 0.0005104
shapiro.test(residuals(cons_lm1))
##
## Shapiro-Wilk normality test
##
## data: residuals(cons_lm1)
## W = 0.99027, p-value = 0.04992
shapiro.test(residuals(cons_lm2))
##
## Shapiro-Wilk normality test
##
## data: residuals(cons_lm2)
## W = 0.9879, p-value = 0.01551
shapiro.test(residuals(cons_lm3))
##
## Shapiro-Wilk normality test
##
## data: residuals(cons_lm3)
## W = 0.97874, p-value = 0.000253
shapiro.test(residuals(cons_lm4))
##
## Shapiro-Wilk normality test
##
## data: residuals(cons_lm4)
## W = 0.9895, p-value = 0.03467
akurasi <- matrix(c(mape.koyck, mape.dlm, mape.dlm2, mape.ardl))
row.names(akurasi) <- c("Koyck","DLM 1","DLM 2","Autoregressive")
colnames(akurasi) <- c("MAPE")
akurasi
## MAPE
## Koyck 0.1542487
## DLM 1 0.1941691
## DLM 2 0.4908630
## Autoregressive 0.1997476
The best model is the one with the smallest test-set MAPE; based on these results, the Koyck model performs best (test MAPE of about 0.154). The lag-144 DLM fits the training data almost perfectly (training MAPE of about 0.003) yet has the worst test MAPE (about 0.49), which is consistent with overfitting.
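The metric being compared is the mean absolute percentage error; MLmetrics::MAPE reports it as a proportion rather than a percentage (multiply by 100 for a percent value), which is why the entries in the table lie between 0 and 1:

$$
\mathrm{MAPE}=\frac{1}{n}\sum_{t=1}^{n}\left|\frac{Y_t-\hat{Y}_t}{Y_t}\right|.
$$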
par(mfrow=c(1,1))
# plot the actual test-set values and the forecasts from each model
plot(1:nrow(test), test$Yt, type="b", col="black", xlab="Time index", ylim=c(0,150),
     main="Comparison of Forecast Methods vs Actual Values")
points(1:nrow(test), test$Yt, col="black", pch=19)
lines(1:nrow(test), test$Yt, col="black")
points(1:nrow(test), fore.koyck$forecasts, col="blue", pch=19)
lines(1:nrow(test), fore.koyck$forecasts, col="blue")
points(1:nrow(test), fore.dlm$forecasts, col="red", pch=19)
lines(1:nrow(test), fore.dlm$forecasts, col="red")
points(1:nrow(test), fore.dlm2$forecasts, col="orange", pch=19)
lines(1:nrow(test), fore.dlm2$forecasts, col="orange")
points(1:nrow(test), fore.ardl$forecasts, col="green", pch=19)
lines(1:nrow(test), fore.ardl$forecasts, col="green")
legend("topleft", c("Actual data", "Koyck", "DLM", "Optimum DLM", "Autoregressive"), lty=1,
       col=c("black", "blue", "red", "orange", "green"))
Aqibah et al. 2020. Model dinamis autoregressive distributed lag (studi kasus: pengaruh kurs dollar Amerika dan inflasi terhadap harga saham tahun 2014-2018). E-Jurnal Matematika 9(4): 240-250.
Chilin et al. 2019. Model autoregressive distributed lag (ADL) pada data harga saham. Buletin Ilmiah Math. Stat. dan Terapan (Bimaster) 8(1): 83-90.
https://www.kaggle.com/code/grosvenpaul/dengue-cases-visualization-and-modeling/report