Packages

library(dLagM) # can handle the time series conversion automatically
## Warning: package 'dLagM' was built under R version 4.1.3
## Loading required package: nardl
## Warning: package 'nardl' was built under R version 4.1.3
## Registered S3 method overwritten by 'quantmod':
##   method            from
##   as.zoo.data.frame zoo
## Loading required package: dynlm
## Warning: package 'dynlm' was built under R version 4.1.3
## Loading required package: zoo
## 
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
## 
##     as.Date, as.Date.numeric
library(dynlm) # the data must be a time series
library(MLmetrics) #MAPE
## Warning: package 'MLmetrics' was built under R version 4.1.3
## 
## Attaching package: 'MLmetrics'
## The following object is masked from 'package:dLagM':
## 
##     MAPE
## The following object is masked from 'package:base':
## 
##     Recall
library(lmtest)
## Warning: package 'lmtest' was built under R version 4.1.3
library(car)
## Warning: package 'car' was built under R version 4.1.3
## Loading required package: carData
## Warning: package 'carData' was built under R version 4.1.3
library(readxl)

Data

The data are daily weather observations for the city of Delhi, covering January 1, 2016 through December 31, 2016, obtained from the Kaggle website. The variables used in the analysis are Date, Meantemp (Xt), and Humidity (Yt), with 366 observations in total.

library(readxl)
data <- read_excel("D://Kuliah//Semester 5//MPDW//Tugas Individu 4//Tugas Individu4.xlsx")
str(data)
## tibble [366 x 3] (S3: tbl_df/tbl/data.frame)
##  $ date: POSIXct[1:366], format: "2016-01-01" "2016-01-02" ...
##  $ Xt  : num [1:366] 14.7 14 14.4 15.8 15.8 ...
##  $ Yt  : num [1:366] 72.3 75.9 74.8 77.1 88.8 ...
data
## # A tibble: 366 x 3
##    date                   Xt    Yt
##    <dttm>              <dbl> <dbl>
##  1 2016-01-01 00:00:00  14.7  72.3
##  2 2016-01-02 00:00:00  14    75.9
##  3 2016-01-03 00:00:00  14.4  74.8
##  4 2016-01-04 00:00:00  15.8  77.1
##  5 2016-01-05 00:00:00  15.8  88.8
##  6 2016-01-06 00:00:00  17.4  81.6
##  7 2016-01-07 00:00:00  17.1  87  
##  8 2016-01-08 00:00:00  15.5  83.2
##  9 2016-01-09 00:00:00  15.9  65.1
## 10 2016-01-10 00:00:00  15.6  74.4
## # ... with 356 more rows

The data are split into a training set containing 80% of the observations (292) and a test set containing the remaining 20% (74 observations).

#SPLIT DATA
train<-data[c(1:292),]
test<-data[293:366,]
# convert to time series objects
train.ts<-ts(train)
test.ts<-ts(test)
data.ts<-ts(data)
Xt <- data$Xt
Yt <- data$Yt

Time Series Data Exploration

Time Series Plots

Time Series Plot of Humidity

data.ts1<-ts(Yt)
plot(data.ts1, main = "Time Series Plot", xlab = "Period", ylab="Humidity")
points(data.ts1)

Based on the plot above, the Humidity variable shows a trend pattern. Humidity will serve as the response variable (Yt) in the regression analysis.

Time Series Plot of Meantemp

data.ts1<-ts(Xt)
plot(data.ts1, main = "Time Series Plot", xlab = "Period", ylab="Meantemp")
points(data.ts1)

Based on the plot above, the Meantemp (mean temperature) variable shows a trend pattern. Meantemp will serve as the explanatory variable (Xt) in the regression analysis.

Correlation Between Variables

cor(Xt, Yt)
## [1] -0.5087387

Scatter Plot of X and Y

plot(Xt, Yt, pch = 20, col = "blue", main = "Scatter Plot Humidity VS Meantemp")

The scatter plot shows a negative linear relationship between Meantemp (X) and Humidity (Y), with a correlation of -0.5087387.

Koyck Model

The Koyck method is based on the assumption that the further a lag of the independent variable is from the current period, the smaller its effect on the dependent variable. Koyck proposed a method for estimating dynamic distributed lag models by assuming that all of the 𝛽 coefficients have the same sign. The Koyck model is the most common type of infinite distributed lag model and is also known as the geometric lag.
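
For reference, a standard way to write the geometric (Koyck) lag model and its transformed estimating equation, assuming a decay rate 0 < φ < 1, is:

$$Y_t = \alpha + \beta_0 \sum_{i=0}^{\infty} \phi^{i} X_{t-i} + \varepsilon_t \quad\Longrightarrow\quad Y_t = \alpha(1-\phi) + \beta_0 X_t + \phi Y_{t-1} + \nu_t, \qquad \nu_t = \varepsilon_t - \phi\varepsilon_{t-1}$$

so only the intercept, the current X, and the lagged Y need to be estimated, which matches the form reported by koyckDlm() below.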

#MODEL KOYCK
model.koyck = dLagM::koyckDlm(x = train$Xt, y = train$Yt)
summary(model.koyck)
## 
## Call:
## "Y ~ (Intercept) + Y.1 + X.t"
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -22.0814  -4.2197  -0.8517   3.8078  20.2773 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  4.76581    3.43365   1.388    0.166    
## Y.1          0.90944    0.02901  31.353   <2e-16 ***
## X.t          0.01508    0.07740   0.195    0.846    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 7.345 on 288 degrees of freedom
## Multiple R-Squared: 0.8219,  Adjusted R-squared: 0.8207 
## Wald test: 665.2 on 2 and 288 DF,  p-value: < 2.2e-16 
## 
## Diagnostic tests:
## NULL
## 
##                             alpha       beta       phi
## Geometric coefficients:  52.62673 0.01508206 0.9094413

The estimated intercept is 4.76581, the coefficient on Y.1 is 0.90944, and the coefficient on X.t is 0.01508. Only Y.1 has a p-value < 0.05, so only that parameter estimate is significant; the intercept and X.t are not significant at the 5% level.
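
As a quick consistency check on the geometric coefficients reported above, the intercept of the transformed equation should be approximately alpha × (1 − phi); a minimal sketch:

# check: intercept of the transformed Koyck model = alpha * (1 - phi)
alpha <- 52.62673
phi <- 0.9094413
alpha * (1 - phi) # approximately 4.766, matching the estimated intercept 4.76581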

AIC(model.koyck)
## [1] 1991.324
BIC(model.koyck)
## [1] 2006.018
# forecast
(fore.koyck <- forecast(model = model.koyck, x=test$Xt, h=74))
## $forecasts
##  [1] 49.27208 50.01441 50.67945 51.29820 51.83326 52.33496 52.78428 53.18593
##  [9] 53.53580 53.83835 54.11797 54.35735 54.57072 54.76244 54.92689 55.10529
## [17] 55.25510 55.36570 55.46343 55.57674 55.66750 55.73902 55.81188 55.88420
## [25] 55.94422 56.01023 56.05593 56.08081 56.09634 56.10988 56.12219 56.11734
## [33] 56.13681 56.14379 56.16583 56.18959 56.19002 56.22327 56.26397 56.29108
## [41] 56.29790 56.29138 56.25919 56.22811 56.19167 56.18870 56.15118 56.11309
## [49] 56.07697 56.03976 55.98557 55.97424 55.91911 55.92323 55.92497 55.91360
## [57] 55.89580 55.87375 55.81850 55.79339 55.74919 55.73575 55.75282 55.74196
## [65] 55.72055 55.67491 55.66006 55.59651 55.58612 55.57226 55.56519 55.52891
## [73] 55.47867 55.44743
## 
## $call
## forecast.koyckDlm(model = model.koyck, x = test$Xt, h = 74)
## 
## attr(,"class")
## [1] "forecast.koyckDlm" "dLagM"
# MAPE on the test data
mape.koyck <- MAPE(fore.koyck$forecasts, test$Yt)

# accuracy on the training data
mape_train <- dLagM::GoF(model.koyck)["MAPE"]

c("MAPE_testing" = mape.koyck, "MAPE_training" = mape_train)
## $MAPE_testing
## [1] 0.1570652
## 
## $MAPE_training.MAPE
## [1] 0.1044997

The test MAPE is 0.1570652 and the training MAPE is 0.1044997, so the test MAPE is larger than the training MAPE.
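
MAPE here is the mean absolute percentage error. A minimal sketch of computing it by hand for the Koyck test forecasts, assuming MLmetrics::MAPE uses the standard definition; this should reproduce mape.koyck:

# manual MAPE: mean of |actual - forecast| / actual over the test set
mean(abs((test$Yt - fore.koyck$forecasts) / test$Yt))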

Regression with Distributed Lag

Regression with Distributed Lag (lag=2)

#REGRESSION WITH DISTRIBUTED LAG -> parameters estimated by least squares

model.dlm = dLagM::dlm(x = train$Xt,y = train$Yt , q = 2)
summary(model.dlm)
## 
## Call:
## lm(formula = model.formula, data = design)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -34.735 -10.396   1.032  10.844  31.908 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  95.5225     3.8105  25.068  < 2e-16 ***
## x.t          -2.7795     0.5103  -5.446 1.11e-07 ***
## x.1           0.6573     0.6849   0.960    0.338    
## x.2           0.8202     0.5061   1.621    0.106    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 14.61 on 286 degrees of freedom
## Multiple R-squared:  0.2974, Adjusted R-squared:   0.29 
## F-statistic: 40.35 on 3 and 286 DF,  p-value: < 2.2e-16
## 
## AIC and BIC values for the model:
##        AIC      BIC
## 1 2384.458 2402.807

The estimated intercept is 95.5225, the coefficient x.t is -2.7795, x.1 is 0.6573, and x.2 is 0.8202. The p-value for x.t is < 0.05, so that parameter estimate is significant. Based on the hypothesis tests, the intercept and x.t reject H0, so both have a significant effect on humidity at the 5% significance level.
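
Putting the estimates together, the fitted distributed lag model with q = 2 is approximately:

$$\hat{Y}_t = 95.5225 - 2.7795\,X_t + 0.6573\,X_{t-1} + 0.8202\,X_{t-2}$$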

AIC(model.dlm)
## [1] 2384.458
BIC(model.dlm)
## [1] 2402.807
# forecast
(fore.dlm <- forecast(model = model.dlm, x=test$Xt, h=74)) # forecast 74 periods ahead
## $forecasts
##  [1] 57.31445 57.09269 59.39757 56.51348 61.66904 58.44252 58.87464 60.67966
##  [9] 62.83689 64.66817 62.32517 64.41751 64.81010 64.23766 65.72772 59.85295
## [17] 62.86468 68.61480 67.34748 61.32746 64.50105 67.32515 64.73532 63.36081
## [25] 65.11053 63.08261 65.90876 68.98041 68.78249 67.67263 67.26121 70.18737
## [33] 65.08698 67.23266 65.17160 64.58497 69.50621 62.72890 61.07938 65.15115
## [41] 68.57305 69.60106 72.91796 71.41372 71.41222 65.39903 72.68547 73.54143
## [49] 71.74686 72.26805 75.75051 67.63135 76.43968 66.55164 66.84673 72.09805
## [57] 72.79894 72.85020 78.67460 72.18930 75.30861 70.81411 65.41782 73.01404
## [65] 75.58171 78.46838 71.78583 80.74740 71.28094 71.43910 72.80332 78.30613
## [73] 80.48306 75.44722
## 
## $call
## forecast.dlm(model = model.dlm, x = test$Xt, h = 74)
## 
## attr(,"class")
## [1] "forecast.dlm" "dLagM"
# MAPE on the test data
mape.dlm <- MAPE(fore.dlm$forecasts, test$Yt)

# accuracy on the training data
mape_train <- GoF(model.dlm)["MAPE"]

c("MAPE_testing" = mape.dlm, "MAPE_training" = mape_train)
## $MAPE_testing
## [1] 0.1933397
## 
## $MAPE_training.MAPE
## [1] 0.256812

The test MAPE is 0.1933397 and the training MAPE is 0.256812, so the training MAPE is larger than the test MAPE.

Regression with Distributed Lag (Optimum Lag)

# determining the optimum lag
finiteDLMauto(formula = Yt ~ Xt,
              data = data.frame(train), q.min = 1, q.max = 4 ,
              model.type = "dlm", error.type = "AIC", trace = TRUE) # q.max is the maximum lag considered
##   q - k    MASE      AIC      BIC   GMRAE   MBRAE R.Adj.Sq Ljung-Box
## 4     4 2.09455 2367.910 2393.551 2.65759 0.47505  0.29488         0
## 3     3 2.06821 2375.516 2397.515 2.48524 0.69709  0.29444         0
## 2     2 2.08330 2384.458 2402.807 2.59019 4.99611  0.29002         0
## 1     1 2.08456 2392.336 2407.030 2.57200 0.94080  0.28850         0
# DLM model with the optimum lag
model.dlm2 = dLagM::dlm(x = train$Xt,y = train$Yt , q = 4) # some lags are not significant, so the number of lags could be reduced
summary(model.dlm2)
## 
## Call:
## lm(formula = model.formula, data = design)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -36.113 -10.016   0.772  11.050  29.681 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 94.82451    3.87387  24.478  < 2e-16 ***
## x.t         -3.04583    0.52072  -5.849 1.37e-08 ***
## x.1          0.73151    0.68346   1.070    0.285    
## x.2         -0.02641    0.68343  -0.039    0.969    
## x.3          0.28278    0.68312   0.414    0.679    
## x.4          0.78392    0.51635   1.518    0.130    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 14.56 on 282 degrees of freedom
## Multiple R-squared:  0.3072, Adjusted R-squared:  0.2949 
## F-statistic:    25 on 5 and 282 DF,  p-value: < 2.2e-16
## 
## AIC and BIC values for the model:
##       AIC      BIC
## 1 2367.91 2393.551

The estimated intercept is 94.82451, together with coefficients for the current period and the four preceding periods. The p-value for x.t is < 0.05, so that parameter estimate is significant. Based on the hypothesis tests, the intercept and x.t reject H0, so both have a significant effect on humidity at the 5% significance level.
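
Written out, the fitted distributed lag model with q = 4 is approximately:

$$\hat{Y}_t = 94.8245 - 3.0458\,X_t + 0.7315\,X_{t-1} - 0.0264\,X_{t-2} + 0.2828\,X_{t-3} + 0.7839\,X_{t-4}$$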

AIC(model.dlm2)
## [1] 2367.91
BIC(model.dlm2)
## [1] 2393.551
# forecast
(fore.dlm2 <- forecast(model = model.dlm2, x=test$Xt, h=74))
## $forecasts
##  [1] 56.59957 57.04155 59.28097 56.01542 62.66976 58.18794 60.10677 61.36043
##  [9] 62.99033 66.06790 63.94224 66.54603 65.59707 65.15220 67.00209 59.84304
## [17] 63.50397 67.71951 67.10078 63.34035 65.88400 66.08528 64.30236 64.51733
## [25] 65.11413 62.08995 66.06837 68.92931 69.49432 69.62171 68.54837 70.79019
## [33] 64.80931 68.13271 64.10035 63.74108 69.21389 61.27930 61.68171 63.93176
## [41] 66.54661 70.17283 75.24964 73.51231 73.95569 66.31730 73.42636 72.24107
## [49] 72.54829 74.27735 76.29668 67.41707 78.18824 64.38318 67.15023 70.70730
## [57] 70.27558 73.90352 80.39217 72.80592 77.90407 70.45636 64.51586 72.12638
## [65] 72.51385 79.22759 73.62069 83.47859 70.34205 72.76003 71.79161 76.38422
## [73] 80.78853 76.96304
## 
## $call
## forecast.dlm(model = model.dlm2, x = test$Xt, h = 74)
## 
## attr(,"class")
## [1] "forecast.dlm" "dLagM"
# test accuracy
mape.dlm2 <- MAPE(fore.dlm2$forecasts, test$Yt)

# accuracy on the training data
mape_train <- GoF(model.dlm2)["MAPE"]

c("MAPE_testing" = mape.dlm2, "MAPE_training" = mape_train)
## $MAPE_testing
## [1] 0.1914698
## 
## $MAPE_training.MAPE
## [1] 0.2578234

The test MAPE is 0.1914698 and the training MAPE is 0.2578234, so the training MAPE is larger than the test MAPE.

Autoregressive Model / Dynamic Regression

When the dependent variable is influenced by the independent variable at the current time, and also by its own value one period earlier, the model is called autoregressive (Gujarati, 2004).

#MODEL AUTOREGRESSIVE 
#library(dLagM)
model.ardl = ardlDlm(x = train$Xt, y = train$Yt, p = 1 , q = 1) # p: lag order of x, q: lag order of y
# model for p=1, q=1: yt = b0 + b1*y(t-1) + b2*x(t) + b3*x(t-1)
# model for p=2, q=2: yt = b0 + b1*y(t-1) + b2*y(t-2) + b3*x(t) + b4*x(t-1) + b5*x(t-2)

summary(model.ardl)
## 
## Time series regression with "ts" data:
## Start = 2, End = 292
## 
## Call:
## dynlm(formula = as.formula(model.text), data = data, start = 1)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -19.4737  -2.4665   0.0533   3.1498  18.2177 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  6.82609    2.37894   2.869  0.00442 ** 
## X.t         -3.08676    0.17955 -17.192  < 2e-16 ***
## X.1          3.00224    0.18155  16.536  < 2e-16 ***
## Y.1          0.92536    0.02051  45.117  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 5.152 on 287 degrees of freedom
## Multiple R-squared:  0.9127, Adjusted R-squared:  0.9118 
## F-statistic:  1000 on 3 and 287 DF,  p-value: < 2.2e-16

The estimated intercept is 6.82609, the coefficient X.t is -3.08676, X.1 is 3.00224, and Y.1 is 0.92536. The p-values for X.t, X.1, and Y.1 are all < 0.05, so these parameter estimates are significant at the 5% level. Based on the hypothesis tests, the intercept, X.t, X.1, and Y.1 all reject H0, so all of them have a significant effect on humidity at the 5% significance level.
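
In equation form, the fitted ARDL(1,1) model is approximately:

$$\hat{Y}_t = 6.8261 - 3.0868\,X_t + 3.0022\,X_{t-1} + 0.9254\,Y_{t-1}$$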

AIC(model.ardl)
## [1] 1785.87
BIC(model.ardl)
## [1] 1804.237
# forecast
(fore.ardl <- forecast(model = model.ardl, x=test$Xt, h=74))
## $forecasts
##  [1] 47.94369 48.28010 51.10616 48.86337 55.22185 52.51485 54.43205 56.25452
##  [9] 59.70341 63.02956 62.07941 65.14169 65.89483 66.20312 68.05558 61.89212
## [17] 64.47835 69.63986 69.89882 64.57055 67.01695 69.09057 67.21492 65.79672
## [25] 66.86744 64.37351 67.27380 70.51948 71.65511 71.41133 71.07028 74.03931
## [33] 68.70466 70.71810 67.23332 66.36957 70.64928 63.66539 61.60196 63.80678
## [41] 67.52028 70.01197 75.16122 75.06665 76.29216 69.62646 76.58807 76.91365
## [49] 76.72563 77.14813 80.84020 72.43530 81.38795 69.64525 69.98802 72.55898
## [57] 73.88508 74.82816 81.73576 75.97791 80.02650 74.04439 67.83707 73.32783
## [65] 75.51400 80.60015 74.63767 84.67119 74.29255 75.02819 73.69565 79.67518
## [73] 82.79379 79.29324
## 
## $call
## forecast.ardlDlm(model = model.ardl, x = test$Xt, h = 74)
## 
## attr(,"class")
## [1] "forecast.ardlDlm" "dLagM"
# test accuracy
mape.ardl <- MAPE(fore.ardl$forecasts, test$Yt) # test data

# accuracy on the training data
mape_train <- GoF(model.ardl)["MAPE"]

c("MAPE_testing" = mape.ardl, "MAPE_training" = mape_train)
## $MAPE_testing
## [1] 0.1982454
## 
## $MAPE_training.MAPE
## [1] 0.07410837

The test MAPE is 0.1982454 and the training MAPE is 0.07410837, so the test MAPE is larger than the training MAPE.

# determining the optimum lag orders
ardlBoundOrders(data = data.frame(data) , formula = Yt ~ Xt ) # the training set should ideally be used here, but with this data it causes an error, so the full data set is used for illustration
## $p
##   Xt
## 1 15
## 
## $q
## [1] 5
## 
## $Stat.table
##           q = 1    q = 2    q = 3    q = 4    q = 5    q = 6    q = 7    q = 8
## p = 1  2248.735 2245.339 2229.620 2214.967 2207.268 2202.682 2197.356 2186.308
## p = 2  2239.771 2232.156 2218.370 2204.207 2195.852 2190.264 2184.876 2174.541
## p = 3  2225.696 2225.696 2218.912 2205.065 2196.910 2191.117 2185.962 2175.542
## p = 4  2208.495 2210.472 2210.472 2204.183 2196.646 2190.659 2185.598 2175.865
## p = 5  2197.030 2198.783 2200.776 2200.776 2189.747 2183.262 2178.584 2169.386
## p = 6  2178.731 2179.654 2181.512 2183.008 2183.008 2184.989 2180.410 2171.275
## p = 7  2185.818 2184.026 2183.707 2184.561 2180.879 2180.879 2181.359 2172.496
## p = 8  2183.549 2179.484 2176.259 2175.887 2171.017 2172.523 2172.523 2173.246
## p = 9  2175.558 2172.524 2167.667 2167.157 2160.183 2161.671 2163.540 2163.540
## p = 10 2170.456 2168.294 2163.688 2162.427 2154.521 2156.195 2158.195 2160.186
## p = 11 2169.918 2167.207 2163.345 2162.041 2152.335 2154.225 2155.946 2157.354
## p = 12 2166.276 2163.034 2158.522 2158.287 2148.941 2150.929 2152.485 2152.981
## p = 13 2167.803 2163.613 2157.698 2156.998 2145.684 2147.666 2148.888 2148.334
## p = 14 2163.416 2159.213 2153.708 2153.164 2142.451 2144.446 2145.745 2145.196
## p = 15 2145.927 2142.289 2136.031 2135.871 2126.544 2128.490 2129.931 2129.908
##           q = 9   q = 10   q = 11   q = 12   q = 13   q = 14   q = 15
## p = 1  2176.082 2172.715 2169.627 2165.566 2160.642 2157.274 2149.986
## p = 2  2165.594 2161.763 2158.630 2154.430 2149.893 2146.778 2139.634
## p = 3  2166.404 2162.547 2159.476 2155.034 2150.555 2147.460 2140.183
## p = 4  2166.750 2162.910 2159.881 2155.739 2151.430 2148.348 2140.995
## p = 5  2158.631 2154.677 2151.617 2147.626 2142.672 2139.647 2131.936
## p = 6  2160.358 2156.461 2153.400 2149.398 2144.461 2141.446 2133.720
## p = 7  2161.534 2157.622 2154.489 2150.535 2145.588 2142.598 2134.858
## p = 8  2162.308 2158.477 2155.270 2151.072 2146.033 2143.027 2134.939
## p = 9  2164.297 2160.445 2157.240 2153.052 2147.982 2144.977 2136.840
## p = 10 2160.186 2162.180 2158.964 2154.822 2149.763 2146.771 2138.603
## p = 11 2158.885 2158.885 2160.877 2156.771 2151.685 2148.700 2140.441
## p = 12 2154.117 2156.107 2156.107 2156.891 2151.412 2148.418 2139.653
## p = 13 2148.840 2150.669 2151.845 2151.845 2153.408 2150.411 2141.652
## p = 14 2145.840 2147.811 2149.704 2150.382 2150.382 2152.020 2143.035
## p = 15 2130.413 2132.347 2134.338 2134.556 2136.274 2136.274 2138.264
## 
## $min.Stat
## [1] 2126.544
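
A minimal sketch of refitting an ARDL model at the orders suggested above (15 lags of X, 5 lags of Y); the name model.ardl.opt is introduced here purely for illustration, and note that the order search was run on the full data set rather than the training set:

# illustrative refit at the selected orders (p = 15 lags of X, q = 5 lags of Y)
model.ardl.opt <- ardlDlm(x = train$Xt, y = train$Yt, p = 15, q = 5)
summary(model.ardl.opt)
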
# DLM and ARDL modeling with the dynlm package
#library(dynlm)

# same as the DLM model with lag 1
cons_lm1 <- dynlm(Yt ~ Xt+L(Xt),data = train.ts)

# same as the ARDL model with p=0, q=1
cons_lm2 <- dynlm(Yt ~ Xt+L(Yt),data = train.ts)

# same as the ARDL model with p=1, q=1
cons_lm3 <- dynlm(Yt ~ Xt+L(Xt)+L(Yt),data = train.ts)

# same as the DLM model with lag 2
cons_lm4 <- dynlm(Yt ~ Xt+L(Xt)+L(Xt,2),data = train.ts)
# Model summaries
summary(cons_lm1)
## 
## Time series regression with "ts" data:
## Start = 2, End = 292
## 
## Call:
## dynlm(formula = Yt ~ Xt + L(Xt), data = train.ts)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -33.921  -9.995   0.708  10.955  32.305 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  95.9128     3.7680  25.455  < 2e-16 ***
## Xt           -2.7229     0.5094  -5.346 1.84e-07 ***
## L(Xt)         1.4055     0.5057   2.779  0.00581 ** 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 14.63 on 288 degrees of freedom
## Multiple R-squared:  0.2934, Adjusted R-squared:  0.2885 
## F-statistic: 59.79 on 2 and 288 DF,  p-value: < 2.2e-16
summary(cons_lm2)
## 
## Time series regression with "ts" data:
## Start = 2, End = 292
## 
## Call:
## dynlm(formula = Yt ~ Xt + L(Yt), data = train.ts)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -20.1938  -4.3465  -0.4302   3.5070  20.6395 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 15.11531    3.24410   4.659 4.85e-06 ***
## Xt          -0.24475    0.07249  -3.376 0.000836 ***
## L(Yt)        0.85924    0.02806  30.619  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 7.186 on 288 degrees of freedom
## Multiple R-squared:  0.8295, Adjusted R-squared:  0.8283 
## F-statistic: 700.5 on 2 and 288 DF,  p-value: < 2.2e-16
summary(cons_lm3)
## 
## Time series regression with "ts" data:
## Start = 2, End = 292
## 
## Call:
## dynlm(formula = Yt ~ Xt + L(Xt) + L(Yt), data = train.ts)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -19.4737  -2.4665   0.0533   3.1498  18.2177 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  6.82609    2.37894   2.869  0.00442 ** 
## Xt          -3.08676    0.17955 -17.192  < 2e-16 ***
## L(Xt)        3.00224    0.18155  16.536  < 2e-16 ***
## L(Yt)        0.92536    0.02051  45.117  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 5.152 on 287 degrees of freedom
## Multiple R-squared:  0.9127, Adjusted R-squared:  0.9118 
## F-statistic:  1000 on 3 and 287 DF,  p-value: < 2.2e-16
summary(cons_lm4)
## 
## Time series regression with "ts" data:
## Start = 3, End = 292
## 
## Call:
## dynlm(formula = Yt ~ Xt + L(Xt) + L(Xt, 2), data = train.ts)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -34.735 -10.396   1.032  10.844  31.908 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  95.5225     3.8105  25.068  < 2e-16 ***
## Xt           -2.7795     0.5103  -5.446 1.11e-07 ***
## L(Xt)         0.6573     0.6849   0.960    0.338    
## L(Xt, 2)      0.8202     0.5061   1.621    0.106    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 14.61 on 286 degrees of freedom
## Multiple R-squared:  0.2974, Adjusted R-squared:   0.29 
## F-statistic: 40.35 on 3 and 286 DF,  p-value: < 2.2e-16
#SSE
deviance(cons_lm1)
## [1] 61638.52
deviance(cons_lm2)
## [1] 14873.74
deviance(cons_lm3)
## [1] 7616.657
deviance(cons_lm4)
## [1] 61070.82
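
For lm-type fits such as these, deviance() returns the residual sum of squares, so the same values can be computed directly from the residuals; for example:

# SSE computed by hand for the ARDL(1,1) fit; should match deviance(cons_lm3)
sum(residuals(cons_lm3)^2)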

Model Diagnostic Tests

Non-Autocorrelation Test

# model encompassing test
if(require("lmtest")) encomptest(cons_lm1, cons_lm2)
## Encompassing test
## 
## Model 1: Yt ~ Xt + L(Xt)
## Model 2: Yt ~ Xt + L(Yt)
## Model E: Yt ~ Xt + L(Xt) + L(Yt)
##           Res.Df Df       F    Pr(>F)    
## M1 vs. ME    287 -1 2035.57 < 2.2e-16 ***
## M2 vs. ME    287 -1  273.45 < 2.2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# diagnostics
# Durbin-Watson test
dwtest(cons_lm1)
## 
##  Durbin-Watson test
## 
## data:  cons_lm1
## DW = 0.13556, p-value < 2.2e-16
## alternative hypothesis: true autocorrelation is greater than 0
dwtest(cons_lm2)
## 
##  Durbin-Watson test
## 
## data:  cons_lm2
## DW = 2.0681, p-value = 0.6823
## alternative hypothesis: true autocorrelation is greater than 0
dwtest(cons_lm3)
## 
##  Durbin-Watson test
## 
## data:  cons_lm3
## DW = 1.8372, p-value = 0.06655
## alternative hypothesis: true autocorrelation is greater than 0
dwtest(cons_lm4)
## 
##  Durbin-Watson test
## 
## data:  cons_lm4
## DW = 0.1396, p-value < 2.2e-16
## alternative hypothesis: true autocorrelation is greater than 0

Heteroscedasticity Test

# heteroscedasticity
bptest(cons_lm1)
## 
##  studentized Breusch-Pagan test
## 
## data:  cons_lm1
## BP = 15.981, df = 2, p-value = 0.0003386
bptest(cons_lm2)
## 
##  studentized Breusch-Pagan test
## 
## data:  cons_lm2
## BP = 4.6984, df = 2, p-value = 0.09544
bptest(cons_lm3)
## 
##  studentized Breusch-Pagan test
## 
## data:  cons_lm3
## BP = 19.402, df = 3, p-value = 0.0002257
bptest(cons_lm4)
## 
##  studentized Breusch-Pagan test
## 
## data:  cons_lm4
## BP = 17.687, df = 3, p-value = 0.0005104

Normality Test

# Shapiro-Wilk test
shapiro.test(residuals(cons_lm1))
## 
##  Shapiro-Wilk normality test
## 
## data:  residuals(cons_lm1)
## W = 0.99027, p-value = 0.04992
shapiro.test(residuals(cons_lm2))
## 
##  Shapiro-Wilk normality test
## 
## data:  residuals(cons_lm2)
## W = 0.9879, p-value = 0.01551
shapiro.test(residuals(cons_lm3))
## 
##  Shapiro-Wilk normality test
## 
## data:  residuals(cons_lm3)
## W = 0.97874, p-value = 0.000253
shapiro.test(residuals(cons_lm4))
## 
##  Shapiro-Wilk normality test
## 
## data:  residuals(cons_lm4)
## W = 0.9895, p-value = 0.03467

Forecast Accuracy Comparison

# COMPARISON
akurasi <- matrix(c(mape.koyck, mape.dlm, mape.dlm2, mape.ardl))
row.names(akurasi)<- c("Koyck","DLM 1","DLM 2","Autoregressive")
colnames(akurasi) <- c("MAPE")
akurasi
##                     MAPE
## Koyck          0.1570652
## DLM 1          0.1933397
## DLM 2          0.1914698
## Autoregressive 0.1982454

The best forecasting method is the Koyck model, since it produces the smallest MAPE among the methods compared.

#PLOT
par(mfrow=c(1,1))
plot(test$Xt, test$Yt, type="b", col="black")
points(test$Xt, fore.koyck$forecasts,col="red")
lines(test$Xt, fore.koyck$forecasts,col="red")
points(test$Xt, fore.dlm$forecasts,col="blue")
lines(test$Xt, fore.dlm$forecasts,col="blue")
points(test$Xt, fore.dlm2$forecasts,col="orange")
lines(test$Xt, fore.dlm2$forecasts,col="orange")
points(test$Xt, fore.ardl$forecasts,col="green")
lines(test$Xt, fore.ardl$forecasts,col="green")
legend("topleft",c("aktual", "koyck","DLM 1","DLM 2", "autoregressive"), lty=1, col=c("black","red","blue","orange","green"), cex=0.8)

Conclusion

For the daily weather data for Delhi over the period January 1, 2016 to December 31, 2016, there is a moderately strong negative relationship between Meantemp and Humidity, with a correlation of -0.5087387. Based on the forecast accuracy comparison, the best method is the Koyck model, since it produces the smallest MAPE.