library(tseries)
## Warning: package 'tseries' was built under R version 4.3.2
## Registered S3 method overwritten by 'quantmod':
## method from
## as.zoo.data.frame zoo
library(lmtest)
## Warning: package 'lmtest' was built under R version 4.3.2
## Loading required package: zoo
## Warning: package 'zoo' was built under R version 4.3.2
##
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
##
## as.Date, as.Date.numeric
library(olsrr)
## Warning: package 'olsrr' was built under R version 4.3.2
##
## Attaching package: 'olsrr'
## The following object is masked from 'package:datasets':
##
## rivers
library(ggplot2)
## Warning: package 'ggplot2' was built under R version 4.3.2
df <- read.csv("regresi_UAS.csv", header = TRUE, sep = ";")
head(df)
##   Kabupaten.        Kota     Y    X1    X2    X3    X4
## 1          1     Pacitan 71.13 21.29 85.18  7.51 86.50
## 2          2    Ponorogo 72.16 23.97 85.32 10.97 79.19
## 3          3  Trenggalek 72.99 19.74 86.78  5.67 73.06
## 4          4 Tulungagung 73.36 20.43 86.36  3.24 80.70
## 5          5      Blitar 72.88 22.31 85.04  5.59 82.34
## 6          6      Kediri 72.22 24.83 78.95  5.04 75.60
tail(df)
##    Kabupaten.             Kota     Y    X1    X2   X3    X4
## 33         33 Kota Probolinggo 69.80 19.99 79.24 6.39 73.93
## 34         34    Kota Pasuruan 70.92 35.93 77.04 2.70 78.33
## 35         35   Kota Mojokerto 72.77 20.55 91.78 1.59 82.48
## 36         36      Kota Madiun 72.49 21.67 93.76 1.44 83.71
## 37         37    Kota Surabaya 73.93 21.42 72.78 1.61 74.00
## 38         38        Kota Batu 72.24 26.34 71.64 2.28 87.80
CORRELATION PLOTS
# Scatterplots of Y against each X variable, with a fitted regression line
scatterplots <- lapply(names(df)[grep("^X", names(df))], function(var) {
  ggplot(df, aes(x = .data[[var]], y = Y)) +
    geom_point(color = "dodgerblue", size = 4, alpha = 0.7, shape = 16) +
    geom_smooth(method = "lm", se = FALSE, color = "orange", formula = y ~ x) +
    labs(
      title = paste("Scatterplot of Y versus", var),
      x = var,
      y = "Y"
    ) +
    theme_minimal()
})
print(scatterplots)
## (Output: four scatterplots, [[1]] through [[4]], showing Y against X1, X2, X3, and X4, each with an orange fitted regression line.)
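As a numeric complement to the scatterplots, the pairwise correlations can also be checked directly. A minimal sketch using base R (assuming the column names shown in head(df) above):

# Pearson correlations of Y with each predictor
cor(df[, c("Y", "X1", "X2", "X3", "X4")])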
MULTICOLLINEARITY DETECTION
library(car)
## Warning: package 'car' was built under R version 4.3.2
## Loading required package: carData
reg <- lm(Y~X1+X2+X3+X4, data=df)
vif(reg)
##       X1       X2       X3       X4
## 5.289457 3.270261 2.915271 2.951274
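All VIF values are below the common rule-of-thumb cut-off of 10 (the largest, for X1, is about 5.3), so multicollinearity does not appear severe enough to require dropping predictors before modeling.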
REGRESSION MODELING (ENTER METHOD)
reg <- lm(Y~X1+X2+X3+X4, data=df)
summary(reg)
##
## Call:
## lm(formula = Y ~ X1 + X2 + X3 + X4, data = df)
##
## Residuals:
##     Min      1Q  Median      3Q     Max
## -2.1593 -0.4554  0.0224  0.4477  2.5606
##
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)
## (Intercept) 70.547287   3.124004  22.582   <2e-16 ***
## X1          -0.068017   0.029840  -2.279   0.0292 *
## X2          -0.002847   0.018292  -0.156   0.8773
## X3          -0.102214   0.050778  -2.013   0.0523 .
## X4           0.049377   0.018082   2.731   0.0101 *
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.9537 on 33 degrees of freedom
## Multiple R-squared: 0.8125, Adjusted R-squared: 0.7898
## F-statistic: 35.75 on 4 and 33 DF, p-value: 1.457e-11
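In this full (enter) model, X2 is far from significant (p-value about 0.88) and X3 is borderline (p-value about 0.05), which motivates the forward selection step further below.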
RESIDUAL NORMALITY ASSUMPTION
ks_test_result <- ks.test(residuals(reg), "pnorm")
print("Kolmogorov-Smirnov test:")
## [1] "Kolmogorov-Smirnov test:"
print(ks_test_result)
##
## Exact one-sample Kolmogorov-Smirnov test
##
## data: residuals(reg)
## D = 0.095745, p-value = 0.8443
## alternative hypothesis: two-sided
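Note that ks.test(residuals(reg), "pnorm") compares the residuals against a standard normal with mean 0 and standard deviation 1; since the residual standard error here is about 0.95 the conclusion is unlikely to change, but a sketch that matches the reference distribution to the model's estimated scale, with a Shapiro-Wilk cross-check (both base R), is:

# KS test against a normal distribution with the model's residual scale
ks.test(residuals(reg), "pnorm", mean = 0, sd = sigma(reg))
# Shapiro-Wilk normality test as a cross-check
shapiro.test(residuals(reg))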
RESIDUAL INDEPENDENCE ASSUMPTION
dw_test_result <- lmtest::dwtest(reg)
print("Durbin-Watson test:")
## [1] "Durbin-Watson test:"
print(dw_test_result)
##
## Durbin-Watson test
##
## data: reg
## DW = 2.0489, p-value = 0.439
## alternative hypothesis: true autocorrelation is greater than 0
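With DW = 2.0489 and a p-value of 0.439, there is no evidence of positive autocorrelation, so the independence assumption is not rejected.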
RESIDUAL HOMOSCEDASTICITY (IDENTICAL VARIANCE) ASSUMPTION
# Load the package
library(skedastic)
## Warning: package 'skedastic' was built under R version 4.3.2
glejser_test <- glejser(reg)
glejser_test
## # A tibble: 1 × 4
##   statistic p.value parameter alternative
##       <dbl>   <dbl>     <dbl> <chr>
## 1      13.0  0.0111         4 greater
library("lmtest")
library(zoo)
bptest(reg)
##
## studentized Breusch-Pagan test
##
## data: reg
## BP = 10.932, df = 4, p-value = 0.02734
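Both the Glejser test (p-value 0.0111) and the studentized Breusch-Pagan test (p-value 0.02734) reject homoscedasticity at the 5% level. One common follow-up, not part of the original analysis, is to report heteroscedasticity-consistent (HC) standard errors; a minimal sketch, assuming the sandwich package is installed (it is not used elsewhere in this document):

# Coefficient tests for the enter model with HC3 robust standard errors
lmtest::coeftest(reg, vcov. = sandwich::vcovHC(reg, type = "HC3"))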
FORWARD SELECTION METHOD
olsrr::ols_step_forward_p(reg)
##
##                              Selection Summary
## -------------------------------------------------------------------------
##         Variable                     Adj.
## Step    Entered      R-Square    R-Square       C(p)         AIC       RMSE
## -------------------------------------------------------------------------
##    1    X1             0.7143      0.7063    16.2875    120.8832     1.1272
##    2    X4             0.7870      0.7748     5.4906    111.7240     0.9871
##    3    X3             0.8124      0.7958     3.0242    108.9037     0.9399
## -------------------------------------------------------------------------
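X2 never enters under forward selection, which is consistent with its p-value of about 0.88 in the full model; the selected model with X1, X3, and X4 is refit below.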
reg1 <- lm(Y~X1+X3+X4, data=df)
summary(reg1)
##
## Call:
## lm(formula = Y ~ X1 + X3 + X4, data = df)
##
## Residuals:
##      Min       1Q   Median       3Q      Max
## -2.13753 -0.44678  0.03723  0.43130  2.56849
##
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)
## (Intercept) 70.14204    1.70160  41.221  < 2e-16 ***
## X1          -0.06514    0.02310  -2.820  0.00796 **
## X3          -0.09920    0.04626  -2.144  0.03924 *
## X4           0.05047    0.01641   3.076  0.00413 **
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.9399 on 34 degrees of freedom
## Multiple R-squared: 0.8124, Adjusted R-squared: 0.7958
## F-statistic: 49.06 on 3 and 34 DF, p-value: 1.912e-12
ks_test_result1 <- ks.test(residuals(reg1), "pnorm")
print("Kolmogorov-Smirnov test:")
## [1] "Kolmogorov-Smirnov test:"
print(ks_test_result1)
##
## Exact one-sample Kolmogorov-Smirnov test
##
## data: residuals(reg1)
## D = 0.096731, p-value = 0.8357
## alternative hypothesis: two-sided
# Load the package
library(skedastic)
glejser_test <- glejser(reg1)
glejser_test
## # A tibble: 1 × 4
##   statistic p.value parameter alternative
##       <dbl>   <dbl>     <dbl> <chr>
## 1      9.19  0.0269         3 greater
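The Glejser test still rejects homoscedasticity for the reduced model (p-value 0.0269), so the issue noted for the full model persists; the robust-standard-error sketch shown earlier applies to reg1 in the same way.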
dw_test_result <- lmtest::dwtest(reg1)
print("Durbin-Watson test:")
## [1] "Durbin-Watson test:"
print(dw_test_result)
##
## Durbin-Watson test
##
## data: reg1
## DW = 2.0438, p-value = 0.4674
## alternative hypothesis: true autocorrelation is greater than 0
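As with the full model, DW = 2.0438 with a p-value of 0.4674 gives no evidence of positive autocorrelation in the residuals of the reduced model.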