# ==============================================================================
# GTWR ANALYSIS OF FLOOD EVENTS - SUMATRA ISLAND
# Variables: Y (flood event count), X1-X7 (environmental/weather factors)
# Method: log-shift transformation (for non-positive data) & GTWR (tricube kernel)
# ==============================================================================
SAVE_VIS <- FALSE # do not save figures (set TRUE to save)
SAVE_DATA <- TRUE # save SDF CSVs (set FALSE to skip)
# 1. SETUP & LIBRARY -----------------------------------------------------------
setwd("D:/Kuliah/Lomba/PKM-AI 2026/Data") # Sesuaikan path ini
suppressPackageStartupMessages({
library(readxl); library(dplyr); library(tidyr); library(jsonlite)
library(sf); library(sp); library(GWmodel); library(ggplot2)
library(viridis); library(spdep)
library(car); library(lmtest); library(MLmetrics); library(nortest)
library(rnaturalearth); library(scales); library(purrr); library(forecast)
})
## Warning: package 'sf' was built under R version 4.5.2
## Warning: package 'sp' was built under R version 4.5.2
## Warning: package 'GWmodel' was built under R version 4.5.2
## Warning: package 'robustbase' was built under R version 4.5.2
## Warning: package 'ggplot2' was built under R version 4.5.2
## Warning: package 'viridis' was built under R version 4.5.2
## Warning: package 'spdep' was built under R version 4.5.2
## Warning: package 'spData' was built under R version 4.5.2
## Warning: package 'car' was built under R version 4.5.2
## Warning: package 'lmtest' was built under R version 4.5.2
## Warning: package 'MLmetrics' was built under R version 4.5.2
## Warning: package 'nortest' was built under R version 4.5.2
## Warning: package 'rnaturalearth' was built under R version 4.5.2
# ==============================================================================
# PART 1: CONFIGURATION, DATA LOADING, AND TRANSFORMATION (SYNCED FOR COUNT DATA)
# ==============================================================================
# 1. CONFIGURATION ---------------------------------------------------------------
excel_path <- "D:/Kuliah/Lomba/PKM-AI 2026/Data/data_banjir_sumatera.xlsx"
sheet_name <- "Pakai"
pred_use <- paste0("X", 1:7) # adjust to match the predictor column names X1, X2, ...
target_year <- 2024
# 2. LOAD DATA & COORDINATES -----------------------------------------------------
# A. Read the Excel data
dat <- read_excel(excel_path, sheet = sheet_name)
# Validation: make sure the required columns exist
required_cols <- c("Provinsi", "Tahun", "Y", pred_use)
missing_cols <- setdiff(required_cols, names(dat))
if (length(missing_cols) > 0) stop(paste("Missing columns:", paste(missing_cols, collapse=", ")))
# B. Fetch the Indonesian province coordinates (JSON)
wilayah_url <- "https://raw.githubusercontent.com/yusufsyaifudin/wilayah-indonesia/master/data/list_of_area/provinces.json"
wilayah_raw <- tryCatch(
  { fromJSON(wilayah_url) },
  error = function(e) { stop("Failed to download the coordinate JSON. Check your internet connection.") }
)
# C. Standardise the names for the join (upper case & trimmed whitespace)
wilayah_raw$name <- toupper(trimws(wilayah_raw$name))
dat$PROV_MATCH <- toupper(trimws(dat$Provinsi))
# D. Join the data with the coordinates
dat2 <- dat %>%
  left_join(wilayah_raw %>% select(name, latitude, longitude),
            by = c("PROV_MATCH" = "name")) %>%
  rename(lat = latitude, lon = longitude) %>%
  mutate(
    lat = as.numeric(lat),
    lon = as.numeric(lon),
    Y = as.numeric(Y) # make sure Y is treated as numeric
  ) %>%
  filter(!is.na(lat) & !is.na(lon)) %>% # drop rows whose coordinates were not found
  select(Provinsi, PROV_MATCH, Tahun, lat, lon, Y, everything())
# Check for dropped rows
if (nrow(dat2) < nrow(dat)) {
  warning("WARNING: Rows were lost after the coordinate join. Check the spelling of the province names!")
}
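# Optional diagnostic (a minimal sketch, not part of the original pipeline):
# list the province names that failed to find a coordinate match, so spelling
# differences can be fixed directly in the Excel file.
unmatched_prov <- dplyr::anti_join(
  dat %>% distinct(PROV_MATCH),
  wilayah_raw %>% transmute(PROV_MATCH = name),
  by = "PROV_MATCH"
)
if (nrow(unmatched_prov) > 0) print(unmatched_prov)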
# 3. DATA TRANSFORMATION (LOG & LOG-SHIFT) --------------------------------------
# Initialise the modelling data
df_mod <- dat2
# A. Transform the response Y (SPECIFIC TO COUNT DATA)
# The earlier percentage logic (Y/100) is dropped and replaced by log(Y + 1).
# Rationale: count data are often heteroskedastic (large variance) and right-skewed.
# The logarithm stabilises the variance and brings the distribution closer to normal for OLS.
# Adding 1 avoids log(0) = -Inf when a count of zero occurs.
df_mod$t_Y <- log(df_mod$Y + 1)
cat("--- Y transformation status ---\n")
## --- Y transformation status ---
cat("Data type : count (number of events)\n")
## Data type : count (number of events)
cat("Method : log-level (y = log(Y + 1))\n")
## Method : log-level (y = log(Y + 1))
cat("Original range :", min(df_mod$Y), "to", max(df_mod$Y), "\n")
## Original range : 1 to 120
cat("Transformed range :", round(min(df_mod$t_Y),2), "to", round(max(df_mod$t_Y),2), "\n\n")
## Transformed range : 0.69 to 4.8
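# Quick sanity check (a minimal sketch): the back-transform exp(t_Y) - 1 used later
# for the MAPE calculations should reproduce the observed counts exactly.
stopifnot(isTRUE(all.equal(exp(df_mod$t_Y) - 1, df_mod$Y)))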
# B. Transform the predictors X (log-shift to handle non-positive values)
cat("--- X transformation status ---\n")
## --- X transformation status ---
for (v in pred_use) {
  vals <- df_mod[[v]]
  # Make sure the column actually exists
  if (is.null(vals)) stop(paste("Column", v, "not found in the Excel file!"))
  min_val <- min(vals, na.rm = TRUE)
  if (min_val <= 0) {
    # If there are negative or zero values, shift first, then take the log
    shift_factor <- abs(min_val) + 1
    df_mod[[paste0("t_", v)]] <- log(vals + shift_factor)
    cat(sprintf("> %s (Min: %.2f) -> Shifted +%.2f -> Log\n", v, min_val, shift_factor))
  } else {
    # If all values are positive, take the natural log directly
    df_mod[[paste0("t_", v)]] <- log(vals)
    cat(sprintf("> %s (All positive) -> Log directly\n", v))
  }
}
## > X1 (Min: -2312.00) -> Shifted +2313.00 -> Log
## > X2 (All positive) -> Log directly
## > X3 (All positive) -> Log directly
## > X4 (Min: 0.00) -> Shifted +1.00 -> Log
## > X5 (All positive) -> Log directly
## > X6 (All positive) -> Log directly
## > X7 (All positive) -> Log directly
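# The loop above applies the shift but does not keep it. A small sketch (useful if
# new data ever need the same transformation, or when interpreting coefficients on
# the original scale): store the shift applied to each predictor in a named vector.
shift_used <- sapply(pred_use, function(v) {
  mv <- min(df_mod[[v]], na.rm = TRUE)
  if (mv <= 0) abs(mv) + 1 else 0
})
# print(shift_used) # per the log above: X1 -> 2313, X4 -> 1, all other predictors 0 (no shift)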
# C. Finalise the data
# Drop rows with NA in the transformed columns (just in case)
df_mod <- na.omit(df_mod)
cat("\nData ready for modelling. Number of rows:", nrow(df_mod), "\n")
##
## Data ready for modelling. Number of rows: 70
# ==============================================================================
# PART 2: SPATIAL DATA, OLS, AND GTWR (SYNCED FOR COUNT DATA)
# ==============================================================================
# 4. SPATIAL DATA PREPARATION (SF & SP) ------------------------------------------
# Convert to Simple Features (sf)
# remove = FALSE keeps the lat/lon columns for plotting later on
df_sf <- st_as_sf(df_mod, coords = c("lon", "lat"), crs = 4326, remove = FALSE)
# Keep only the columns that are needed (cleanup):
# the original response (Y) and the transformed variables (t_...)
df_sf_clean <- df_sf %>%
  select(Provinsi, Tahun, Y, t_Y, starts_with("t_"))
# Convert back to the Spatial (sp) format required by the GWmodel package
df_sp_final <- as_Spatial(df_sf_clean)
# Update the regression formula
# t_Y (log count) is used as the dependent variable
form_glob <- as.formula(paste0("t_Y ~ ", paste0("t_", pred_use, collapse = " + ")))
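# For reference, form_glob expands to:
# t_Y ~ t_X1 + t_X2 + t_X3 + t_X4 + t_X5 + t_X6 + t_X7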
# 5. GLOBAL MODEL (OLS) --------------------------------------------------------
cat("\n================ GLOBAL OLS RESULTS ================\n")
##
## ================ GLOBAL OLS RESULTS ================
ols <- lm(form_glob, data = df_sp_final)
# Print the OLS diagnostics
print(summary(ols))
##
## Call:
## lm(formula = form_glob, data = df_sp_final)
##
## Residuals:
## Min 1Q Median 3Q Max
## -1.17963 -0.33074 0.00206 0.35964 1.21862
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) -5.307863 1.973954 -2.689 0.00919 **
## t_X1 0.003183 0.051161 0.062 0.95059
## t_X2 0.108639 0.068579 1.584 0.11825
## t_X3 -0.170868 0.180382 -0.947 0.34719
## t_X4 0.406335 0.072492 5.605 5.11e-07 ***
## t_X5 0.578719 0.086248 6.710 6.81e-09 ***
## t_X6 0.004479 0.096456 0.046 0.96311
## t_X7 0.561894 0.284209 1.977 0.05249 .
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.5353 on 62 degrees of freedom
## Multiple R-squared: 0.7483, Adjusted R-squared: 0.7199
## F-statistic: 26.33 on 7 and 62 DF, p-value: 2.453e-16
cat("AIC OLS:", AIC(ols), "\n")
## AIC OLS: 120.6618
cat("VIF OLS:\n"); print(car::vif(ols))
## VIF OLS:
## t_X1 t_X2 t_X3 t_X4 t_X5 t_X6 t_X7
## 1.145892 1.749647 1.746665 1.334669 1.978235 1.838809 1.232926
# Compute the OLS MAPE (against the original counts Y)
# Back-transform: since t_Y = log(Y + 1), the prediction is exp(fitted) - 1
Y_hat_ols_raw <- exp(fitted(ols)) - 1
Y_hat_ols <- pmax(Y_hat_ols_raw, 0) # make sure there are no negative predictions
# Compute the error against the original data (df_sp_final$Y)
mape_ols <- MLmetrics::MAPE(Y_hat_ols, df_sp_final$Y) # result is a proportion (0.xx)
cat("MAPE OLS (Count):", round(mape_ols * 100, 3), "%\n")
## MAPE OLS (Count): 50.609 %
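# Helper (a minimal sketch): back-transform log-scale fitted values and compute the
# MAPE on the count scale, so the same logic can be reused for the GTWR models below.
# 'mape_count' is a new name introduced here, not part of the original script.
mape_count <- function(yhat_log, y_count) {
  yhat <- pmax(exp(yhat_log) - 1, 0) # undo log(Y + 1) and clamp at zero
  MLmetrics::MAPE(yhat, y_count)
}
# mape_count(fitted(ols), df_sp_final$Y) reproduces mape_ols computed above.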
# Classical assumption checks
cat("\n--- Classical assumption checks ---\n")
##
## --- Classical assumption checks ---
print(lmtest::bptest(ols)) # heteroskedasticity
##
## studentized Breusch-Pagan test
##
## data: ols
## BP = 9.6637, df = 7, p-value = 0.2084
print(nortest::lillie.test(resid(ols))) # normality
##
## Lilliefors (Kolmogorov-Smirnov) normality test
##
## data: resid(ols)
## D = 0.053734, p-value = 0.8856
# 6. GTWR MODEL (Geographically & Temporally Weighted) -------------------------
cat("\n================ GTWR RESULTS (SUMATRA) ================\n")
##
## ================ GTWR RESULTS (SUMATRA) ================
# Make sure the data are in the right order & define the projection
df_sp_gtwr <- df_sp_final
sp::proj4string(df_sp_gtwr) <- sp::CRS("+proj=longlat +datum=WGS84")
df_sp_gtwr$Tahun <- as.integer(df_sp_gtwr$Tahun) # the year must be an integer for GTWR
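# Sanity check (sketch): the panel should be balanced, one row per province per year
# (70 rows = 10 provinces x 7 years, 2018-2024, as shown in the result table later).
# print(table(df_sp_gtwr$Tahun))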
# A. Find the optimum bandwidth (cross-validation)
cat("Searching for the optimum bandwidth (this may take a while)...\n")
## Searching for the optimum bandwidth (this may take a while)...
bw_fixed <- GWmodel::bw.gtwr(form_glob, data=df_sp_gtwr, obs.tv=df_sp_gtwr$Tahun,
                             longlat=TRUE, kernel="tricube", approach="CV", adaptive=FALSE)
## Fixed bandwidth: 5.872049e+49 CV score: 43.45074
## Fixed bandwidth: 3.629851e+49 CV score: 43.45074
bw_adapt <- GWmodel::bw.gtwr(form_glob, data=df_sp_gtwr, obs.tv=df_sp_gtwr$Tahun,
longlat=TRUE, kernel="tricube", approach="CV", adaptive=TRUE)
## Adaptive bandwidth: 50 CV score: 43.62059
## Adaptive bandwidth: 39 CV score: 45.95619
## Adaptive bandwidth: 58 CV score: 43.21115
## Adaptive bandwidth: 62 CV score: 43.38209
## Adaptive bandwidth: 54 CV score: 43.46024
## Adaptive bandwidth: 59 CV score: 43.23567
## Adaptive bandwidth: 56 CV score: 43.42175
## Adaptive bandwidth: 58 CV score: 43.21115
cat("Bandwidth Fixed:", bw_fixed, "\n")
## Bandwidth Fixed: 3.629851e+49
cat("Bandwidth Adaptive:", bw_adapt, "\n")
## Bandwidth Adaptive: 58
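# Note: the CV search for the fixed bandwidth stops at an extremely large value with
# an unchanged CV score, i.e. the CV surface is essentially flat there, so the
# adaptive specification (58 neighbours) is the one carried forward for the maps in Part 4.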
# B. Fit the GTWR models
# Model 1: fixed kernel (constant radius)
cat("Running GTWR Fixed Kernel...\n")
## Running GTWR Fixed Kernel...
gtwr_fixed <- GWmodel::gtwr(form_glob, data=df_sp_gtwr, obs.tv=df_sp_gtwr$Tahun,
                            st.bw=bw_fixed, longlat=TRUE, kernel="tricube", adaptive=FALSE)
## Warning in sigma.hat1 * betas.SE[i, ]: Recycling array of length 1 in array-vector arithmetic is deprecated.
## Use c() or as.vector() instead.
## (the same warning is repeated once per observation; identical repetitions omitted)
# Model 2: adaptive kernel (fixed number of neighbours - recommended)
cat("Running GTWR Adaptive Kernel...\n")
## Running GTWR Adaptive Kernel...
gtwr_adapt <- GWmodel::gtwr(form_glob, data=df_sp_gtwr, obs.tv=df_sp_gtwr$Tahun,
                            st.bw=bw_adapt, longlat=TRUE, kernel="tricube", adaptive=TRUE)
## Warning in sigma.hat1 * betas.SE[i, ]: Recycling array of length 1 in array-vector arithmetic is deprecated.
## Use c() or as.vector() instead.
## (the same warning is repeated once per observation; identical repetitions omitted)
# 7. MODEL EVALUATION & COMPARISON ---------------------------------------------
cat("\n--- GTWR DIAGNOSTICS ---\n")
##
## --- GTWR DIAGNOSTICS ---
cat("AIC Fixed :", gtwr_fixed$GTW.diagnostic$AIC, "\n")
## AIC Fixed : 107.6856
cat("AIC Adaptive :", gtwr_adapt$GTW.diagnostic$AIC, "\n")
## AIC Adaptive : 102.3347
cat("R2 Fixed :", gtwr_fixed$GTW.diagnostic$gw.R2, "\n")
## R2 Fixed : 0.7974766
cat("R2 Adaptive :", gtwr_adapt$GTW.diagnostic$gw.R2, "\n")
## R2 Adaptive : 0.8160295
# Compute the GTWR MAPE (back-transform to counts)
# Formula: exp(yhat) - 1
Yhat_fixed_raw <- exp(gtwr_fixed$SDF$yhat) - 1
Yhat_fixed_count <- pmax(Yhat_fixed_raw, 0) # prevent negative predictions
Yhat_adapt_raw <- exp(gtwr_adapt$SDF$yhat) - 1
Yhat_adapt_count <- pmax(Yhat_adapt_raw, 0) # prevent negative predictions
# Store the count predictions in the SDF objects (IMPORTANT FOR THE VISUALISATIONS LATER)
gtwr_fixed$SDF$pred_count <- Yhat_fixed_count
gtwr_adapt$SDF$pred_count <- Yhat_adapt_count
# Compute the MAPE
mape_fixed <- MLmetrics::MAPE(Yhat_fixed_count, df_sp_gtwr$Y)
mape_adapt <- MLmetrics::MAPE(Yhat_adapt_count, df_sp_gtwr$Y)
cat("\n--- PERBANDINGAN AKURASI (MAPE) ---\n")
##
## --- PERBANDINGAN AKURASI (MAPE) ---
cat("MAPE OLS :", round(mape_ols * 100, 3), "%\n")
## MAPE OLS : 50.609 %
cat("MAPE GTWR Fixed :", round(mape_fixed * 100, 3), "%\n")
## MAPE GTWR Fixed : 40.168 %
cat("MAPE GTWR Adaptive :", round(mape_adapt * 100, 3), "%\n")
## MAPE GTWR Adaptive : 39.735 %
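# Compact comparison table (a minimal sketch) collecting the headline figures printed above.
model_comparison <- data.frame(
  Model = c("OLS", "GTWR fixed", "GTWR adaptive"),
  AIC   = c(AIC(ols), gtwr_fixed$GTW.diagnostic$AIC, gtwr_adapt$GTW.diagnostic$AIC),
  R2    = c(summary(ols)$r.squared, gtwr_fixed$GTW.diagnostic$gw.R2, gtwr_adapt$GTW.diagnostic$gw.R2),
  MAPE  = c(mape_ols, mape_fixed, mape_adapt)
)
# print(model_comparison)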
# Save the GTWR predictions to CSV so they can be opened in other GIS software
SDF_fix <- as.data.frame(gtwr_fixed$SDF)
SDF_fix$banjir_hat_count <- Yhat_fixed_count # add a clearly named count column (these are counts, not percentages)
SDF_adap <- as.data.frame(gtwr_adapt$SDF)
SDF_adap$banjir_hat_count <- Yhat_adapt_count # add a clearly named count column
if (SAVE_DATA) {
  write.csv(SDF_fix, "GTWR_FIXED_tricube_SDF_banjir_sumatera.csv", row.names=FALSE)
  write.csv(SDF_adap, "GTWR_ADAPT_tricube_SDF_banjir_sumatera.csv", row.names=FALSE)
  cat("Prediction CSV files saved successfully.\n")
}
## Prediction CSV files saved successfully.
# ==============================================================================
# PART 3: MAP OF OBSERVED VALUES (OBSERVED COUNT DATA)
# ==============================================================================
# 1. VISUALISATION CONFIGURATION ---------------------------------------------------
# Define the control variables if they do not exist yet
if (!exists("target_year")) target_year <- 2024 # change the year as needed
if (!exists("SAVE_VIS")) SAVE_VIS <- TRUE # set TRUE to save the figures
cat("--- Visualisation configuration ---\n")
## --- Visualisation configuration ---
cat("Target year :", target_year, "\n")
## Target year : 2024
cat("Save figures :", SAVE_VIS, "\n")
## Save figures : FALSE
# 2. BASE MAPS ------------------------------------------------------------------
# Get the provincial map of Indonesia
ne_indo <- rnaturalearth::ne_states(country = "Indonesia", returnclass = "sf")
# List of provinces on Sumatra
list_sumatera <- c("ACEH", "SUMATERA UTARA", "SUMATERA BARAT", "RIAU", "JAMBI",
                   "SUMATERA SELATAN", "BENGKULU", "LAMPUNG",
                   "KEPULAUAN BANGKA BELITUNG", "KEPULAUAN RIAU")
# Keep only the Sumatran provinces & create the join column
map_sumatera_base <- ne_indo %>%
  mutate(PROV_MATCH = toupper(name)) %>%
  filter(PROV_MATCH %in% list_sumatera)
# 3. LABEL & VALUE PREPARATION ----------------------------------------------
# Uses df_mod from PART 1 (cleaned data with coordinates attached)
# A. Prepare the labels (province names)
label_df <- df_mod %>%
  filter(Tahun == target_year) %>%
  select(Provinsi, lon, lat) %>%
  distinct(Provinsi, .keep_all = TRUE) %>%
  mutate(label = case_when(
    Provinsi == "NANGGROE ACEH DARUSSALAM" ~ "ACEH",
    Provinsi == "SUMATERA UTARA" ~ "SUMUT",
    Provinsi == "SUMATERA BARAT" ~ "SUMBAR",
    Provinsi == "SUMATERA SELATAN" ~ "SUMSEL",
    Provinsi == "KEPULAUAN BANGKA BELITUNG" ~ "BABEL",
    Provinsi == "KEPULAUAN RIAU" ~ "KEPRI",
    TRUE ~ Provinsi
  ))
# B. Prepare the flood data for the target year (COUNT)
dat_year <- df_mod %>%
  filter(Tahun == target_year) %>%
  mutate(PROV = toupper(Provinsi)) %>%
  group_by(PROV) %>%
  # Use SUM to make sure we get the total number of events (count)
  summarise(Y = sum(Y, na.rm=TRUE), .groups="drop")
# C. Join the base map with the flood data
map_final <- map_sumatera_base %>%
  left_join(dat_year, by = c("PROV_MATCH" = "PROV"))
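# Quick check (sketch): any polygon left without a value will be drawn in grey;
# listing such provinces helps catch name mismatches between the Natural Earth
# layer and the Excel data.
# print(map_final$PROV_MATCH[is.na(map_final$Y)])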
# 4. PLOTTING ------------------------------------------------------------------
p_obs <- ggplot(map_final) +
  # Map layer (polygons)
  geom_sf(aes(fill = Y), color = "white", linewidth = 0.4) +
  # Label layer (text)
  geom_text(data = label_df, aes(x = lon, y = lat, label = label),
            inherit.aes = FALSE, color = "black", size = 3,
            fontface = "bold", check_overlap = TRUE) +
  # Colour scale (counts)
  scale_fill_viridis_c(
    option = "magma",
    direction = -1,
    na.value = "grey90",
    name = "Number of events", # legend label
    labels = scales::comma # whole-number formatting (e.g. 1,000)
  ) +
  # Title & theme
  labs(
    title = paste0("Observed Flood Events - Year ", target_year),
    subtitle = "Unit: number of events (count) • Sumatra Island",
    x = NULL, y = NULL,
    caption = "Source: historical data"
  ) +
  theme_minimal(base_size = 11) +
  theme(
    plot.title = element_text(face = "bold"),
    panel.grid = element_blank(),
    axis.text = element_blank() # hide the coordinate axis labels
  )
# Display the map
print(p_obs)

# 5. SAVE THE FIGURE -------------------------------------------------------------
if (SAVE_VIS) {
  filename <- sprintf("Peta_Banjir_Observed_Count_%d.png", target_year)
  ggsave(filename, p_obs, width = 10, height = 8, dpi = 300)
  cat(paste("Figure saved as:", filename, "\n"))
}
# ==============================================================================
# PART 4: FURTHER GTWR VISUALISATIONS (COUNT PREDICTIONS & YEARLY TRENDS)
# ==============================================================================
# Make sure the Sumatra base map & labels are available (from Part 3)
# If ne_indo, list_sumatera, map_sumatera_base do not exist yet, run Part 3 first!
# A. Take the predictions from the GTWR object
# We read from 'gtwr_adapt$SDF' because that is where the predictions are stored
df_pred_raw <- as.data.frame(gtwr_adapt$SDF)
# --- FIX ---
# Reattach the Provinsi & Tahun columns from the original data (df_mod),
# because GWmodel often drops character columns.
df_pred_raw$Provinsi <- df_mod$Provinsi
df_pred_raw$Tahun <- df_mod$Tahun
# B. Filter & tidy the data
df_pred_clean <- df_pred_raw %>%
  rename(Y_pred_count = pred_count) %>% # use the column that was already back-transformed
  select(Provinsi, Tahun, Y_pred_count) %>%
  mutate(PROV = toupper(Provinsi))
# C. Keep only the target year for the map
pred_year_data <- df_pred_clean %>%
  filter(Tahun == target_year) %>%
  group_by(PROV) %>%
  summarise(Y_hat = sum(Y_pred_count, na.rm=TRUE), .groups="drop")
# D. Join with the base map
map_pred <- map_sumatera_base %>%
  left_join(pred_year_data, by = c("PROV_MATCH" = "PROV"))
# E. Plot the prediction map
p_pred <- ggplot(map_pred) +
  geom_sf(aes(fill = Y_hat), color = "white", linewidth = 0.4) +
  # Province name labels
  geom_text(data = label_df, aes(x = lon, y = lat, label = label),
            inherit.aes = FALSE, color = "black", size = 3,
            fontface = "bold", check_overlap = TRUE) +
  # Colour scale (counts)
  scale_fill_viridis_c(
    option = "plasma",
    na.value = "grey90",
    name = "Est. events", # legend label
    labels = scales::comma # whole-number formatting (e.g. 1,500)
  ) +
  # Title & captions
  labs(
    title = paste0("GTWR (Adaptive) Model Predictions - Year ", target_year),
    subtitle = "Unit: estimated number of events (count) • Method: tricube kernel",
    x = NULL, y = NULL
  ) +
  theme_minimal(base_size = 11) +
  theme(
    plot.title = element_text(face = "bold"),
    panel.grid = element_blank(),
    axis.text = element_blank()
  )
# Display & save
print(p_pred)

if (SAVE_VIS) {
  ggsave(sprintf("Peta_Banjir_Prediksi_Count_%d.png", target_year),
         p_pred, width=10, height=8, dpi=300)
}
# Small-multiple maps per year to show how the floods evolved (observed data)
# A. Data preparation (using the observed/original data)
dat_all <- df_mod %>%
  mutate(PROV = toupper(Provinsi))
# B. Join with the base map
map_all <- map_sumatera_base %>%
  left_join(dat_all, by = c("PROV_MATCH" = "PROV"))
# C. Facet plot
p_all <- ggplot(map_all) +
  geom_sf(aes(fill = Y), color = "white", linewidth = 0.2) +
  # Colour scale
  scale_fill_viridis_c(
    option = "magma",
    direction = -1,
    name = "No. of events",
    na.value = "grey90",
    labels = scales::comma # compact number formatting
  ) +
  # One panel per year
  facet_wrap(~ Tahun) +
  labs(
    title = "Historical Flood Events - Sumatra Island",
    subtitle = "Observed data (counts)",
    caption = "Source: input data"
  ) +
  theme_minimal(base_size = 10) +
  theme(
    plot.title = element_text(face = "bold"),
    panel.grid = element_blank(),
    axis.text = element_blank(),
    legend.position = "bottom",
    legend.key.width = unit(2, "cm")
  )
# Display & save
print(p_all)

if (SAVE_VIS) {
  ggsave("Peta_Banjir_Facet_Tahun_Count.png", p_all, width=12, height=8, dpi=300)
}
# Save the full prediction results so they can be inspected in Excel
if (exists("gtwr_adapt")) {
  # Rebuild the result data frame so it carries the province names
  final_result <- as.data.frame(gtwr_adapt$SDF)
  final_result$Provinsi <- df_mod$Provinsi # reattach
  final_result$Tahun <- df_mod$Tahun # reattach
  final_result <- final_result %>%
    select(Provinsi, Tahun, y, pred_count, yhat, everything()) %>%
    rename(
      Aktual_Log_Y = y, # note: 'y' in the SDF is the transformed response, log(Y + 1), not the raw count
      Prediksi_Count = pred_count,
      Prediksi_Log = yhat
    )
  write.csv(final_result, "Hasil_Prediksi_GTWR_Sumatera_Lengkap.csv", row.names = FALSE)
  cat("\nCSV file 'Hasil_Prediksi_GTWR_Sumatera_Lengkap.csv' saved successfully.\n")
}
##
## CSV file 'Hasil_Prediksi_GTWR_Sumatera_Lengkap.csv' saved successfully.
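# Optional (a minimal sketch): recover the observed counts from the log-scale column
# for a side-by-side check against the back-transformed predictions. This does not
# modify final_result; it only builds a small comparison frame with new names.
check_counts <- data.frame(
  Provinsi       = final_result$Provinsi,
  Tahun          = final_result$Tahun,
  Aktual_Count   = exp(final_result$Aktual_Log_Y) - 1, # inverse of log(Y + 1)
  Prediksi_Count = final_result$Prediksi_Count
)
# head(check_counts)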
final_result
## Provinsi Tahun Aktual_Log_Y Prediksi_Count Prediksi_Log
## 1 Aceh 2018 4.5325995 82.745794 4.427786
## 2 Sumatera Utara 2018 3.4965076 50.002239 3.931870
## 3 Sumatera Barat 2018 3.9318256 40.116963 3.716421
## 4 Riau 2018 2.7725887 11.687723 2.540635
## 5 Jambi 2018 2.0794415 11.378601 2.515969
## 6 Sumatera Selatan 2018 3.5263605 24.792578 3.250087
## 7 Bengkulu 2018 2.1972246 7.735903 2.167441
## 8 Lampung 2018 2.8903718 14.285556 2.726908
## 9 Kepulauan Bangka Belitung 2018 2.6390573 15.672026 2.813732
## 10 Kepulauan Riau 2018 1.6094379 3.877175 1.584566
## 11 Aceh 2019 4.1896547 56.519325 4.052121
## 12 Sumatera Utara 2019 3.1354942 34.067104 3.557263
## 13 Sumatera Barat 2019 3.4011974 35.883492 3.607764
## 14 Riau 2019 2.8332133 10.489080 2.441397
## 15 Jambi 2019 2.5649494 16.571275 2.866265
## 16 Sumatera Selatan 2019 3.4339872 29.124323 3.405333
## 17 Bengkulu 2019 2.7725887 12.060811 2.569616
## 18 Lampung 2019 2.3978953 14.120316 2.716039
## 19 Kepulauan Bangka Belitung 2019 2.5649494 10.530044 2.444956
## 20 Kepulauan Riau 2019 0.6931472 2.375187 1.216451
## 21 Aceh 2020 4.3820266 73.977953 4.317194
## 22 Sumatera Utara 2020 3.9889840 37.544757 3.651820
## 23 Sumatera Barat 2020 3.9889840 24.833262 3.251663
## 24 Riau 2020 1.9459101 9.219440 2.324292
## 25 Jambi 2020 3.0445224 10.857259 2.472940
## 26 Sumatera Selatan 2020 3.5263605 31.737086 3.488509
## 27 Bengkulu 2020 2.6390573 22.272516 3.147273
## 28 Lampung 2020 2.8903718 13.471837 2.672204
## 29 Kepulauan Bangka Belitung 2020 1.0986123 7.186241 2.102455
## 30 Kepulauan Riau 2020 1.0986123 2.190001 1.160021
## 31 Aceh 2021 4.6151205 103.960293 4.653582
## 32 Sumatera Utara 2021 4.4308168 58.060433 4.078561
## 33 Sumatera Barat 2021 4.0775374 58.023185 4.077930
## 34 Riau 2021 4.2195077 35.749335 3.604120
## 35 Jambi 2021 3.6375862 34.104002 3.558315
## 36 Sumatera Selatan 2021 3.6888795 35.495951 3.597201
## 37 Bengkulu 2021 3.2580965 30.109864 3.437525
## 38 Lampung 2021 3.2580965 26.026816 3.296830
## 39 Kepulauan Bangka Belitung 2021 2.7725887 11.500930 2.525803
## 40 Kepulauan Riau 2021 2.7080502 4.794165 1.756851
## 41 Aceh 2022 4.7095302 68.250466 4.237730
## 42 Sumatera Utara 2022 4.2766661 51.241024 3.955868
## 43 Sumatera Barat 2022 3.2188758 29.838778 3.428773
## 44 Riau 2022 3.7376696 23.731942 3.208096
## 45 Jambi 2022 1.6094379 10.305894 2.425324
## 46 Sumatera Selatan 2022 3.6375862 36.425654 3.622356
## 47 Bengkulu 2022 3.6109179 20.871731 3.085195
## 48 Lampung 2022 3.6109179 22.227833 3.145351
## 49 Kepulauan Bangka Belitung 2022 1.9459101 13.496926 2.673937
## 50 Kepulauan Riau 2022 1.3862944 4.041084 1.617621
## 51 Aceh 2023 4.5849675 80.785997 4.404106
## 52 Sumatera Utara 2023 4.7273878 70.555282 4.270470
## 53 Sumatera Barat 2023 4.2904594 42.205770 3.765974
## 54 Riau 2023 4.3820266 26.551614 3.316061
## 55 Jambi 2023 3.2188758 20.562645 3.070962
## 56 Sumatera Selatan 2023 3.3672958 39.766657 3.707865
## 57 Bengkulu 2023 2.4849066 13.316314 2.661400
## 58 Lampung 2023 2.9957323 14.932931 2.768388
## 59 Kepulauan Bangka Belitung 2023 2.5649494 11.031076 2.487493
## 60 Kepulauan Riau 2023 2.4849066 8.366641 2.237155
## 61 Aceh 2024 3.8712010 55.846505 4.040355
## 62 Sumatera Utara 2024 4.7957905 61.796283 4.139896
## 63 Sumatera Barat 2024 4.1743873 43.271738 3.790346
## 64 Riau 2024 3.9889840 23.707079 3.207090
## 65 Jambi 2024 3.8066625 20.314912 3.059407
## 66 Sumatera Selatan 2024 4.2484952 40.910562 3.735538
## 67 Bengkulu 2024 2.3025851 14.015567 2.709087
## 68 Lampung 2024 3.3672958 26.518252 3.314849
## 69 Kepulauan Bangka Belitung 2024 1.0986123 4.672249 1.735586
## 70 Kepulauan Riau 2024 1.6094379 9.343070 2.336317
## Intercept t_X1 t_X2 t_X3 t_X4 t_X5
## 1 -4.1483374 0.130310254 -0.12131190 0.22773155 0.5272421 0.3050900
## 2 -4.1483374 0.130310254 -0.12131190 0.22773155 0.5272421 0.3050900
## 3 -4.1483374 0.130310254 -0.12131190 0.22773155 0.5272421 0.3050900
## 4 -4.1483374 0.130310254 -0.12131190 0.22773155 0.5272421 0.3050900
## 5 -4.1483374 0.130310254 -0.12131190 0.22773155 0.5272421 0.3050900
## 6 -4.1483374 0.130310254 -0.12131190 0.22773155 0.5272421 0.3050900
## 7 -4.1483374 0.130310254 -0.12131190 0.22773155 0.5272421 0.3050900
## 8 -4.1483374 0.130310254 -0.12131190 0.22773155 0.5272421 0.3050900
## 9 -4.1483374 0.130310254 -0.12131190 0.22773155 0.5272421 0.3050900
## 10 -4.1483374 0.130310254 -0.12131190 0.22773155 0.5272421 0.3050900
## 11 0.5002964 0.177687182 -0.06387269 -0.15358034 0.4571435 0.3647071
## 12 0.5002964 0.177687182 -0.06387269 -0.15358034 0.4571435 0.3647071
## 13 0.5002964 0.177687182 -0.06387269 -0.15358034 0.4571435 0.3647071
## 14 0.5002964 0.177687182 -0.06387269 -0.15358034 0.4571435 0.3647071
## 15 0.5002964 0.177687182 -0.06387269 -0.15358034 0.4571435 0.3647071
## 16 0.5002964 0.177687182 -0.06387269 -0.15358034 0.4571435 0.3647071
## 17 0.5002964 0.177687182 -0.06387269 -0.15358034 0.4571435 0.3647071
## 18 0.5002964 0.177687182 -0.06387269 -0.15358034 0.4571435 0.3647071
## 19 0.5002964 0.177687182 -0.06387269 -0.15358034 0.4571435 0.3647071
## 20 0.5002964 0.177687182 -0.06387269 -0.15358034 0.4571435 0.3647071
## 21 -0.3403638 -0.039513498 0.07270197 -0.22729929 0.3913107 0.5796486
## 22 -0.3403638 -0.039513498 0.07270197 -0.22729929 0.3913107 0.5796486
## 23 -0.3403638 -0.039513498 0.07270197 -0.22729929 0.3913107 0.5796486
## 24 -0.3403638 -0.039513498 0.07270197 -0.22729929 0.3913107 0.5796486
## 25 -0.3403638 -0.039513498 0.07270197 -0.22729929 0.3913107 0.5796486
## 26 -0.3403638 -0.039513498 0.07270197 -0.22729929 0.3913107 0.5796486
## 27 -0.3403638 -0.039513498 0.07270197 -0.22729929 0.3913107 0.5796486
## 28 -0.3403638 -0.039513498 0.07270197 -0.22729929 0.3913107 0.5796486
## 29 -0.3403638 -0.039513498 0.07270197 -0.22729929 0.3913107 0.5796486
## 30 -0.3403638 -0.039513498 0.07270197 -0.22729929 0.3913107 0.5796486
## 31 -1.0064349 -0.046076266 0.11139242 -0.17727874 0.4368371 0.5044326
## 32 -1.0064349 -0.046076266 0.11139242 -0.17727874 0.4368371 0.5044326
## 33 -1.0064349 -0.046076266 0.11139242 -0.17727874 0.4368371 0.5044326
## 34 -1.0064349 -0.046076266 0.11139242 -0.17727874 0.4368371 0.5044326
## 35 -1.0064349 -0.046076266 0.11139242 -0.17727874 0.4368371 0.5044326
## 36 -1.0064349 -0.046076266 0.11139242 -0.17727874 0.4368371 0.5044326
## 37 -1.0064349 -0.046076266 0.11139242 -0.17727874 0.4368371 0.5044326
## 38 -1.0064349 -0.046076266 0.11139242 -0.17727874 0.4368371 0.5044326
## 39 -1.0064349 -0.046076266 0.11139242 -0.17727874 0.4368371 0.5044326
## 40 -1.0064349 -0.046076266 0.11139242 -0.17727874 0.4368371 0.5044326
## 41 -1.8010842 -0.043403148 0.13143635 -0.06852675 0.4427078 0.5812448
## 42 -1.8010842 -0.043403148 0.13143635 -0.06852675 0.4427078 0.5812448
## 43 -1.8010842 -0.043403148 0.13143635 -0.06852675 0.4427078 0.5812448
## 44 -1.8010842 -0.043403148 0.13143635 -0.06852675 0.4427078 0.5812448
## 45 -1.8010842 -0.043403148 0.13143635 -0.06852675 0.4427078 0.5812448
## 46 -1.8010842 -0.043403148 0.13143635 -0.06852675 0.4427078 0.5812448
## 47 -1.8010842 -0.043403148 0.13143635 -0.06852675 0.4427078 0.5812448
## 48 -1.8010842 -0.043403148 0.13143635 -0.06852675 0.4427078 0.5812448
## 49 -1.8010842 -0.043403148 0.13143635 -0.06852675 0.4427078 0.5812448
## 50 -1.8010842 -0.043403148 0.13143635 -0.06852675 0.4427078 0.5812448
## 51 -3.5022201 -0.043309946 0.15996023 -0.46049486 0.5345682 0.4054338
## 52 -3.4936776 -0.040824351 0.15299477 -0.41353730 0.5256066 0.4220981
## 53 -4.7307885 -0.023863759 0.14155127 -0.20957426 0.4778788 0.4749676
## 54 -4.4354278 -0.027195992 0.16158156 -0.26389153 0.5096599 0.4839044
## 55 -5.6880331 -0.014790812 0.16808558 0.03010426 0.4596898 0.6063385
## 56 -4.9290307 -0.011371478 0.17499477 0.06065537 0.4665250 0.6195179
## 57 -5.4580895 -0.012699302 0.17721749 0.07248529 0.4551193 0.6161083
## 58 -4.6637543 -0.008702085 0.18238948 0.07324742 0.4702701 0.6218774
## 59 -4.3650084 -0.005751744 0.18654138 0.06583895 0.4878031 0.6303307
## 60 -5.2884583 -0.009901962 0.17923211 0.02621739 0.4863543 0.6217946
## 61 -5.7934879 -0.009207933 0.11961625 -0.84941495 0.4633257 0.4416914
## 62 -5.9113889 -0.038117202 0.12847104 -0.81034204 0.4639040 0.4397379
## 63 -10.5418849 0.024720440 0.09286485 -0.41344297 0.3400786 0.5635756
## 64 -9.4751302 0.010875986 0.13160158 -0.43454574 0.4003785 0.5088350
## 65 -13.5648362 0.035295579 0.20596233 0.17489127 0.3407291 0.7683874
## 66 -11.0371163 0.036135160 0.19006687 0.18619805 0.3560816 0.7432371
## 67 -12.8402980 0.043473659 0.21324307 0.25818161 0.3230945 0.8016033
## 68 -8.5481840 0.042922947 0.17119490 0.06558917 0.3692664 0.6887225
## 69 -7.1877355 0.054554117 0.15857797 -0.01202371 0.3933600 0.6834535
## 70 -12.7216482 0.056466308 0.22061394 0.07343026 0.4212466 0.7547504
## t_X6 t_X7 residual time_stamp Stud_residual Intercept_SE
## 1 -0.236303167 1.0161031 0.1048135486 2018 0.2838348867 6.402864
## 2 -0.236303167 1.0161031 -0.4353619816 2018 -1.0502786249 6.402864
## 3 -0.236303167 1.0161031 0.2154048758 2018 0.4782016082 6.402864
## 4 -0.236303167 1.0161031 0.2319539013 2018 0.7842255438 6.402864
## 5 -0.236303167 1.0161031 -0.4365277194 2018 -1.0254388261 6.402864
## 6 -0.236303167 1.0161031 0.2762737339 2018 0.9444868495 6.402864
## 7 -0.236303167 1.0161031 0.0297832684 2018 0.0836385754 6.402864
## 8 -0.236303167 1.0161031 0.1634634245 2018 0.4358934054 6.402864
## 9 -0.236303167 1.0161031 -0.1746748952 2018 -0.4559147079 6.402864
## 10 -0.236303167 1.0161031 0.0248718437 2018 0.0704829835 6.402864
## 11 -0.310671676 0.4348776 0.1375337649 2019 0.3950687105 3.880453
## 12 -0.310671676 0.4348776 -0.4217692743 2019 -0.9491772017 3.880453
## 13 -0.310671676 0.4348776 -0.2065667104 2019 -0.4950774215 3.880453
## 14 -0.310671676 0.4348776 0.3918163093 2019 1.0179792694 3.880453
## 15 -0.310671676 0.4348776 -0.3013160953 2019 -0.6293645257 3.880453
## 16 -0.310671676 0.4348776 0.0286542833 2019 0.0664599169 3.880453
## 17 -0.310671676 0.4348776 0.2029725263 2019 0.4783607882 3.880453
## 18 -0.310671676 0.4348776 -0.3181439789 2019 -0.7479005200 3.880453
## 19 -0.310671676 0.4348776 0.1199932028 2019 0.2885078658 3.880453
## 20 -0.310671676 0.4348776 -0.5233034541 2019 -1.4528916624 3.880453
## 21 -0.166303531 0.2029834 0.0648325221 2020 0.1506503052 2.874736
## 22 -0.166303531 0.2029834 0.3371639692 2020 0.7721608897 2.874736
## 23 -0.166303531 0.2029834 0.7373211536 2020 1.5694191525 2.874736
## 24 -0.166303531 0.2029834 -0.3783816131 2020 -0.8385706651 2.874736
## 25 -0.166303531 0.2029834 0.5715822210 2020 1.2557160858 2.874736
## 26 -0.166303531 0.2029834 0.0378519682 2020 0.1790603748 2.874736
## 27 -0.166303531 0.2029834 -0.5082157600 2020 -1.3611980324 2.874736
## 28 -0.166303531 0.2029834 0.2181673031 2020 0.5610152233 2.874736
## 29 -0.166303531 0.2029834 -1.0038425675 2020 -2.3831933969 2.874736
## 30 -0.166303531 0.2029834 -0.0614087920 2020 -0.1488773767 2.874736
## 31 -0.158794831 0.3040694 -0.0384615979 2021 -0.0854157945 2.675039
## 32 -0.158794831 0.3040694 0.3522555885 2021 0.7660543303 2.675039
## 33 -0.158794831 0.3040694 -0.0003928973 2021 -0.0008653298 2.675039
## 34 -0.158794831 0.3040694 0.6153875697 2021 1.2647325516 2.675039
## 35 -0.158794831 0.3040694 0.0792710116 2021 0.1609848263 2.675039
## 36 -0.158794831 0.3040694 0.0916781199 2021 0.1888690589 2.675039
## 37 -0.158794831 0.3040694 -0.1794283896 2021 -0.4104562184 2.675039
## 38 -0.158794831 0.3040694 -0.0387330258 2021 -0.0929034292 2.675039
## 39 -0.158794831 0.3040694 0.2467856640 2021 0.5550516404 2.675039
## 40 -0.158794831 0.3040694 0.9511987740 2021 2.2357939152 2.675039
## 41 -0.172314201 0.2471590 0.4718003267 2022 0.9699304112 2.337303
## 42 -0.172314201 0.2471590 0.3207980412 2022 0.6887911816 2.337303
## 43 -0.172314201 0.2471590 -0.2098971094 2022 -0.4289368756 2.337303
## 44 -0.172314201 0.2471590 0.5295740003 2022 1.0791724403 2.337303
## 45 -0.172314201 0.2471590 -0.8158862264 2022 -1.7136757748 2.337303
## 46 -0.172314201 0.2471590 0.0152297456 2022 0.0325126583 2.337303
## 47 -0.172314201 0.2471590 0.5257229180 2022 1.1274509858 2.337303
## 48 -0.172314201 0.2471590 0.4655666740 2022 1.0493850901 2.337303
## 49 -0.172314201 0.2471590 -0.7280264636 2022 -1.6612899559 2.337303
## 50 -0.172314201 0.2471590 -0.2313268729 2022 -0.5258985690 2.337303
## 51 0.062906780 0.4944125 0.1808614322 2023 0.3977454706 2.426743
## 52 0.027926958 0.5234479 0.4569174990 2023 1.0207182466 2.374374
## 53 0.008008813 0.5691494 0.5244854009 2023 1.1215801098 2.688643
## 54 -0.019942203 0.5701876 1.0659655310 2023 2.2886782059 2.610434
## 55 -0.072414683 0.5055965 0.1479134238 2023 0.2984391004 2.612291
## 56 -0.107297057 0.3886397 -0.3405686862 2023 -0.7320486173 2.624905
## 57 -0.081126401 0.4208713 -0.1764930670 2023 -0.3870773491 2.633741
## 58 -0.117123977 0.3331286 0.2273441541 2023 0.6221440893 2.688308
## 59 -0.143528767 0.3185901 0.0774564010 2023 0.1745985478 2.716967
## 60 -0.111415183 0.4727476 0.2477520744 2023 0.5655469876 2.629127
## 61 0.209056034 0.9210994 -0.1691537312 2024 -0.3635353128 2.994647
## 62 0.190444876 0.9798242 0.6558946561 2024 1.4797390921 2.794905
## 63 0.218393562 1.2640367 0.3840407738 2024 1.0027513765 3.165394
## 64 0.166307224 1.2094013 0.7818942454 2024 1.8000465050 3.092494
## 65 0.126209612 1.0633642 0.7472555644 2024 1.5362822715 3.239651
## 66 0.077061795 0.7631363 0.5129573791 2024 1.0777358901 2.997770
## 67 0.114193525 0.8323688 -0.4065023640 2024 -0.8486733204 3.255108
## 68 0.058838829 0.5446981 0.0524463301 2024 0.1404003831 2.887772
## 69 0.003876675 0.4965764 -0.6369733356 2024 -1.5167546186 2.908904
## 70 0.066179623 1.0762599 -0.7268788172 2024 -2.0929876007 3.147781
## t_X1_SE t_X2_SE t_X3_SE t_X4_SE t_X5_SE t_X6_SE t_X7_SE
## 1 0.39548824 0.23837216 0.6312138 0.18508705 0.23780652 0.2820804 0.9894300
## 2 0.39548824 0.23837216 0.6312138 0.18508705 0.23780652 0.2820804 0.9894300
## 3 0.39548824 0.23837216 0.6312138 0.18508705 0.23780652 0.2820804 0.9894300
## 4 0.39548824 0.23837216 0.6312138 0.18508705 0.23780652 0.2820804 0.9894300
## 5 0.39548824 0.23837216 0.6312138 0.18508705 0.23780652 0.2820804 0.9894300
## 6 0.39548824 0.23837216 0.6312138 0.18508705 0.23780652 0.2820804 0.9894300
## 7 0.39548824 0.23837216 0.6312138 0.18508705 0.23780652 0.2820804 0.9894300
## 8 0.39548824 0.23837216 0.6312138 0.18508705 0.23780652 0.2820804 0.9894300
## 9 0.39548824 0.23837216 0.6312138 0.18508705 0.23780652 0.2820804 0.9894300
## 10 0.39548824 0.23837216 0.6312138 0.18508705 0.23780652 0.2820804 0.9894300
## 11 0.21721207 0.15294302 0.4965639 0.13204022 0.17631929 0.2443337 0.5271396
## 12 0.21721207 0.15294302 0.4965639 0.13204022 0.17631929 0.2443337 0.5271396
## 13 0.21721207 0.15294302 0.4965639 0.13204022 0.17631929 0.2443337 0.5271396
## 14 0.21721207 0.15294302 0.4965639 0.13204022 0.17631929 0.2443337 0.5271396
## 15 0.21721207 0.15294302 0.4965639 0.13204022 0.17631929 0.2443337 0.5271396
## 16 0.21721207 0.15294302 0.4965639 0.13204022 0.17631929 0.2443337 0.5271396
## 17 0.21721207 0.15294302 0.4965639 0.13204022 0.17631929 0.2443337 0.5271396
## 18 0.21721207 0.15294302 0.4965639 0.13204022 0.17631929 0.2443337 0.5271396
## 19 0.21721207 0.15294302 0.4965639 0.13204022 0.17631929 0.2443337 0.5271396
## 20 0.21721207 0.15294302 0.4965639 0.13204022 0.17631929 0.2443337 0.5271396
## 21 0.05431322 0.12291585 0.2846894 0.10223871 0.12743940 0.1558600 0.3994546
## 22 0.05431322 0.12291585 0.2846894 0.10223871 0.12743940 0.1558600 0.3994546
## 23 0.05431322 0.12291585 0.2846894 0.10223871 0.12743940 0.1558600 0.3994546
## 24 0.05431322 0.12291585 0.2846894 0.10223871 0.12743940 0.1558600 0.3994546
## 25 0.05431322 0.12291585 0.2846894 0.10223871 0.12743940 0.1558600 0.3994546
## 26 0.05431322 0.12291585 0.2846894 0.10223871 0.12743940 0.1558600 0.3994546
## 27 0.05431322 0.12291585 0.2846894 0.10223871 0.12743940 0.1558600 0.3994546
## 28 0.05431322 0.12291585 0.2846894 0.10223871 0.12743940 0.1558600 0.3994546
## 29 0.05431322 0.12291585 0.2846894 0.10223871 0.12743940 0.1558600 0.3994546
## 30 0.05431322 0.12291585 0.2846894 0.10223871 0.12743940 0.1558600 0.3994546
## 31 0.05267654 0.09551829 0.2284436 0.08554926 0.10848079 0.1207562 0.3686090
## 32 0.05267654 0.09551829 0.2284436 0.08554926 0.10848079 0.1207562 0.3686090
## 33 0.05267654 0.09551829 0.2284436 0.08554926 0.10848079 0.1207562 0.3686090
## 34 0.05267654 0.09551829 0.2284436 0.08554926 0.10848079 0.1207562 0.3686090
## 35 0.05267654 0.09551829 0.2284436 0.08554926 0.10848079 0.1207562 0.3686090
## 36 0.05267654 0.09551829 0.2284436 0.08554926 0.10848079 0.1207562 0.3686090
## 37 0.05267654 0.09551829 0.2284436 0.08554926 0.10848079 0.1207562 0.3686090
## 38 0.05267654 0.09551829 0.2284436 0.08554926 0.10848079 0.1207562 0.3686090
## 39 0.05267654 0.09551829 0.2284436 0.08554926 0.10848079 0.1207562 0.3686090
## 40 0.05267654 0.09551829 0.2284436 0.08554926 0.10848079 0.1207562 0.3686090
## 41 0.05219021 0.08482345 0.2022289 0.07729552 0.09583572 0.1075363 0.3203673
## 42 0.05219021 0.08482345 0.2022289 0.07729552 0.09583572 0.1075363 0.3203673
## 43 0.05219021 0.08482345 0.2022289 0.07729552 0.09583572 0.1075363 0.3203673
## 44 0.05219021 0.08482345 0.2022289 0.07729552 0.09583572 0.1075363 0.3203673
## 45 0.05219021 0.08482345 0.2022289 0.07729552 0.09583572 0.1075363 0.3203673
## 46 0.05219021 0.08482345 0.2022289 0.07729552 0.09583572 0.1075363 0.3203673
## 47 0.05219021 0.08482345 0.2022289 0.07729552 0.09583572 0.1075363 0.3203673
## 48 0.05219021 0.08482345 0.2022289 0.07729552 0.09583572 0.1075363 0.3203673
## 49 0.05219021 0.08482345 0.2022289 0.07729552 0.09583572 0.1075363 0.3203673
## 50 0.05219021 0.08482345 0.2022289 0.07729552 0.09583572 0.1075363 0.3203673
## 51 0.06835470 0.09989265 0.2600747 0.09197577 0.12211015 0.1204592 0.3531477
## 52 0.06182528 0.09616575 0.2492871 0.08800702 0.11792803 0.1120126 0.3366969
## 53 0.05467104 0.11522387 0.2549845 0.09596919 0.13392056 0.1407158 0.4222635
## 54 0.05674246 0.10793404 0.2460980 0.09633186 0.12564392 0.1352673 0.4033774
## 55 0.05195190 0.09759840 0.2021368 0.09572690 0.10758111 0.1365456 0.3898462
## 56 0.05242151 0.09508482 0.1995849 0.09625066 0.10535489 0.1359794 0.3868294
## 57 0.05240437 0.09642288 0.2012011 0.09619228 0.10646119 0.1365651 0.3891386
## 58 0.05291444 0.09576528 0.2030492 0.09760389 0.10682393 0.1372770 0.3917051
## 59 0.05288594 0.09649254 0.2041631 0.09916844 0.10753143 0.1382682 0.3935300
## 60 0.05203737 0.09634347 0.2009903 0.09851861 0.10585282 0.1376745 0.3930619
## 61 0.14279689 0.13491396 0.3119076 0.11191242 0.15656776 0.1565839 0.4477464
## 62 0.12884106 0.12849027 0.2925982 0.10496264 0.14511898 0.1407089 0.3989868
## 63 0.08263272 0.11883598 0.2984838 0.11562392 0.15376722 0.1437575 0.4621999
## 64 0.09651629 0.11973896 0.2793882 0.11791312 0.14452051 0.1537082 0.4543207
## 65 0.05227121 0.10501787 0.2900344 0.10606726 0.15340366 0.1293335 0.3970054
## 66 0.05477627 0.09405178 0.2267751 0.10833922 0.12189890 0.1291904 0.3831269
## 67 0.05420903 0.10514944 0.2700778 0.10461067 0.14761269 0.1307822 0.3944327
## 68 0.05494303 0.09112619 0.2171068 0.11161976 0.11447082 0.1309538 0.3925382
## 69 0.05495531 0.08959838 0.2108561 0.12163993 0.11167910 0.1341814 0.3928480
## 70 0.05445493 0.09836750 0.2609833 0.12771216 0.13920444 0.1569303 0.4322596
## Intercept_TV t_X1_TV t_X2_TV t_X3_TV t_X4_TV t_X5_TV
## 1 -0.6478878 0.32949211 -0.5089181 0.36078352 2.848617 1.282934
## 2 -0.6478878 0.32949211 -0.5089181 0.36078352 2.848617 1.282934
## 3 -0.6478878 0.32949211 -0.5089181 0.36078352 2.848617 1.282934
## 4 -0.6478878 0.32949211 -0.5089181 0.36078352 2.848617 1.282934
## 5 -0.6478878 0.32949211 -0.5089181 0.36078352 2.848617 1.282934
## 6 -0.6478878 0.32949211 -0.5089181 0.36078352 2.848617 1.282934
## 7 -0.6478878 0.32949211 -0.5089181 0.36078352 2.848617 1.282934
## 8 -0.6478878 0.32949211 -0.5089181 0.36078352 2.848617 1.282934
## 9 -0.6478878 0.32949211 -0.5089181 0.36078352 2.848617 1.282934
## 10 -0.6478878 0.32949211 -0.5089181 0.36078352 2.848617 1.282934
## 11 0.1289273 0.81803549 -0.4176241 -0.30928617 3.462154 2.068447
## 12 0.1289273 0.81803549 -0.4176241 -0.30928617 3.462154 2.068447
## 13 0.1289273 0.81803549 -0.4176241 -0.30928617 3.462154 2.068447
## 14 0.1289273 0.81803549 -0.4176241 -0.30928617 3.462154 2.068447
## 15 0.1289273 0.81803549 -0.4176241 -0.30928617 3.462154 2.068447
## 16 0.1289273 0.81803549 -0.4176241 -0.30928617 3.462154 2.068447
## 17 0.1289273 0.81803549 -0.4176241 -0.30928617 3.462154 2.068447
## 18 0.1289273 0.81803549 -0.4176241 -0.30928617 3.462154 2.068447
## 19 0.1289273 0.81803549 -0.4176241 -0.30928617 3.462154 2.068447
## 20 0.1289273 0.81803549 -0.4176241 -0.30928617 3.462154 2.068447
## 21 -0.1183983 -0.72751160 0.5914776 -0.79841144 3.827422 4.548426
## 22 -0.1183983 -0.72751160 0.5914776 -0.79841144 3.827422 4.548426
## 23 -0.1183983 -0.72751160 0.5914776 -0.79841144 3.827422 4.548426
## 24 -0.1183983 -0.72751160 0.5914776 -0.79841144 3.827422 4.548426
## 25 -0.1183983 -0.72751160 0.5914776 -0.79841144 3.827422 4.548426
## 26 -0.1183983 -0.72751160 0.5914776 -0.79841144 3.827422 4.548426
## 27 -0.1183983 -0.72751160 0.5914776 -0.79841144 3.827422 4.548426
## 28 -0.1183983 -0.72751160 0.5914776 -0.79841144 3.827422 4.548426
## 29 -0.1183983 -0.72751160 0.5914776 -0.79841144 3.827422 4.548426
## 30 -0.1183983 -0.72751160 0.5914776 -0.79841144 3.827422 4.548426
## 31 -0.3762319 -0.87470185 1.1661894 -0.77602850 5.106264 4.649972
## 32 -0.3762319 -0.87470185 1.1661894 -0.77602850 5.106264 4.649972
## 33 -0.3762319 -0.87470185 1.1661894 -0.77602850 5.106264 4.649972
## 34 -0.3762319 -0.87470185 1.1661894 -0.77602850 5.106264 4.649972
## 35 -0.3762319 -0.87470185 1.1661894 -0.77602850 5.106264 4.649972
## 36 -0.3762319 -0.87470185 1.1661894 -0.77602850 5.106264 4.649972
## 37 -0.3762319 -0.87470185 1.1661894 -0.77602850 5.106264 4.649972
## 38 -0.3762319 -0.87470185 1.1661894 -0.77602850 5.106264 4.649972
## 39 -0.3762319 -0.87470185 1.1661894 -0.77602850 5.106264 4.649972
## 40 -0.3762319 -0.87470185 1.1661894 -0.77602850 5.106264 4.649972
## 41 -0.7705822 -0.83163386 1.5495285 -0.33885736 5.727470 6.065012
## 42 -0.7705822 -0.83163386 1.5495285 -0.33885736 5.727470 6.065012
## 43 -0.7705822 -0.83163386 1.5495285 -0.33885736 5.727470 6.065012
## 44 -0.7705822 -0.83163386 1.5495285 -0.33885736 5.727470 6.065012
## 45 -0.7705822 -0.83163386 1.5495285 -0.33885736 5.727470 6.065012
## 46 -0.7705822 -0.83163386 1.5495285 -0.33885736 5.727470 6.065012
## 47 -0.7705822 -0.83163386 1.5495285 -0.33885736 5.727470 6.065012
## 48 -0.7705822 -0.83163386 1.5495285 -0.33885736 5.727470 6.065012
## 49 -0.7705822 -0.83163386 1.5495285 -0.33885736 5.727470 6.065012
## 50 -0.7705822 -0.83163386 1.5495285 -0.33885736 5.727470 6.065012
## 51 -1.4431774 -0.63360601 1.6013213 -1.77062537 5.812055 3.320230
## 52 -1.4714097 -0.66031815 1.5909486 -1.65887956 5.972326 3.579286
## 53 -1.7595449 -0.43649729 1.2284891 -0.82190991 4.979502 3.546637
## 54 -1.6991150 -0.47928823 1.4970398 -1.07230272 5.290668 3.851395
## 55 -2.1774117 -0.28470203 1.7222166 0.14893017 4.802096 5.636105
## 56 -1.8777937 -0.21692388 1.8404070 0.30390757 4.846979 5.880295
## 57 -2.0723716 -0.24233288 1.8379196 0.36026294 4.731350 5.787163
## 58 -1.7348290 -0.16445578 1.9045470 0.36073738 4.818149 5.821518
## 59 -1.6065741 -0.10875752 1.9332208 0.32248208 4.918934 5.861827
## 60 -2.0114886 -0.19028561 1.8603452 0.13044108 4.936674 5.874143
## 61 -1.9346144 -0.06448273 0.8866114 -2.72328998 4.140074 2.821087
## 62 -2.1150591 -0.29584669 0.9998503 -2.76947042 4.419706 3.030189
## 63 -3.3303551 0.29916045 0.7814540 -1.38514363 2.941247 3.665122
## 64 -3.0639124 0.11268550 1.0990706 -1.55534738 3.395538 3.520850
## 65 -4.1871289 0.67523938 1.9612122 0.60300184 3.212387 5.008925
## 66 -3.6817758 0.65968638 2.0208748 0.82106901 3.286728 6.097160
## 67 -3.9446613 0.80196345 2.0280000 0.95595279 3.088543 5.430450
## 68 -2.9601314 0.78122648 1.8786575 0.30210550 3.308253 6.016577
## 69 -2.4709428 0.99269956 1.7698753 -0.05702328 3.233807 6.119798
## 70 -4.0414657 1.03693665 2.2427524 0.28136000 3.298407 5.421885
## t_X6_TV t_X7_TV coords.x1 coords.x2
## 1 -0.83771574 1.0269580 97.02530 4.36855
## 2 -0.83771574 1.0269580 99.38122 2.19235
## 3 -0.83771574 1.0269580 100.07610 -1.34225
## 4 -0.83771574 1.0269580 101.54758 0.50041
## 5 -0.83771574 1.0269580 102.77970 -1.61157
## 6 -0.83771574 1.0269580 104.09306 -3.12668
## 7 -0.83771574 1.0269580 102.53598 -3.51868
## 8 -0.83771574 1.0269580 105.02730 -4.85550
## 9 -0.83771574 1.0269580 107.58394 -2.75775
## 10 -0.83771574 1.0269580 104.58037 -0.15478
## 11 -1.27150574 0.8249762 97.02530 4.36855
## 12 -1.27150574 0.8249762 99.38122 2.19235
## 13 -1.27150574 0.8249762 100.07610 -1.34225
## 14 -1.27150574 0.8249762 101.54758 0.50041
## 15 -1.27150574 0.8249762 102.77970 -1.61157
## 16 -1.27150574 0.8249762 104.09306 -3.12668
## 17 -1.27150574 0.8249762 102.53598 -3.51868
## 18 -1.27150574 0.8249762 105.02730 -4.85550
## 19 -1.27150574 0.8249762 107.58394 -2.75775
## 20 -1.27150574 0.8249762 104.58037 -0.15478
## 21 -1.06700601 0.5081514 97.02530 4.36855
## 22 -1.06700601 0.5081514 99.38122 2.19235
## 23 -1.06700601 0.5081514 100.07610 -1.34225
## 24 -1.06700601 0.5081514 101.54758 0.50041
## 25 -1.06700601 0.5081514 102.77970 -1.61157
## 26 -1.06700601 0.5081514 104.09306 -3.12668
## 27 -1.06700601 0.5081514 102.53598 -3.51868
## 28 -1.06700601 0.5081514 105.02730 -4.85550
## 29 -1.06700601 0.5081514 107.58394 -2.75775
## 30 -1.06700601 0.5081514 104.58037 -0.15478
## 31 -1.31500365 0.8249104 97.02530 4.36855
## 32 -1.31500365 0.8249104 99.38122 2.19235
## 33 -1.31500365 0.8249104 100.07610 -1.34225
## 34 -1.31500365 0.8249104 101.54758 0.50041
## 35 -1.31500365 0.8249104 102.77970 -1.61157
## 36 -1.31500365 0.8249104 104.09306 -3.12668
## 37 -1.31500365 0.8249104 102.53598 -3.51868
## 38 -1.31500365 0.8249104 105.02730 -4.85550
## 39 -1.31500365 0.8249104 107.58394 -2.75775
## 40 -1.31500365 0.8249104 104.58037 -0.15478
## 41 -1.60238130 0.7714864 97.02530 4.36855
## 42 -1.60238130 0.7714864 99.38122 2.19235
## 43 -1.60238130 0.7714864 100.07610 -1.34225
## 44 -1.60238130 0.7714864 101.54758 0.50041
## 45 -1.60238130 0.7714864 102.77970 -1.61157
## 46 -1.60238130 0.7714864 104.09306 -3.12668
## 47 -1.60238130 0.7714864 102.53598 -3.51868
## 48 -1.60238130 0.7714864 105.02730 -4.85550
## 49 -1.60238130 0.7714864 107.58394 -2.75775
## 50 -1.60238130 0.7714864 104.58037 -0.15478
## 51 0.52222492 1.4000162 97.02530 4.36855
## 52 0.24931978 1.5546560 99.38122 2.19235
## 53 0.05691482 1.3478538 100.07610 -1.34225
## 54 -0.14742816 1.4135339 101.54758 0.50041
## 55 -0.53033323 1.2969126 102.77970 -1.61157
## 56 -0.78906844 1.0046798 104.09306 -3.12668
## 57 -0.59404949 1.0815458 102.53598 -3.51868
## 58 -0.85319454 0.8504579 105.02730 -4.85550
## 59 -1.03804612 0.8095702 107.58394 -2.75775
## 60 -0.80926528 1.2027307 104.58037 -0.15478
## 61 1.33510565 2.0571899 97.02530 4.36855
## 62 1.35346695 2.4557808 99.38122 2.19235
## 63 1.51917991 2.7348266 100.07610 -1.34225
## 64 1.08196700 2.6619993 101.54758 0.50041
## 65 0.97584640 2.6784627 102.77970 -1.61157
## 66 0.59649767 1.9918631 104.09306 -3.12668
## 67 0.87315778 2.1102936 102.53598 -3.51868
## 68 0.44930970 1.3876309 105.02730 -4.85550
## 69 0.02889129 1.2640420 107.58394 -2.75775
## 70 0.42171355 2.4898461 104.58037 -0.15478
# ==============================================================================
# BAGIAN 5: ANALISIS SPASIAL LANJUTAN (KOEFISIEN, LOCAL R2, RESIDUAL)
# ==============================================================================
# Ambil hasil GTWR Adaptive
df_gtwr <- as.data.frame(gtwr_adapt$SDF)
# Tempelkan atribut asli (Provinsi, Tahun) untuk plotting
# Kita asumsikan urutan data di gtwr sama dengan df_mod (standard GWmodel)
df_gtwr$Provinsi <- df_mod$Provinsi
df_gtwr$PROV <- toupper(df_mod$Provinsi)
df_gtwr$Tahun <- df_mod$Tahun
df_gtwr$lon <- df_mod$lon
df_gtwr$lat <- df_mod$lat
df_gtwr$Y_act <- df_mod$Y # Nilai Aktual (Count)
df_gtwr$Y_pred <- gtwr_adapt$SDF$pred_count # Nilai Prediksi (Count)
# Koefisien Beta di sini adalah elastisitas (model log-log: Y maupun X ditransformasi log).
# Interpretasi: kenaikan 1% pada X mengubah Y sekitar Beta% (positif menaikkan, negatif menurunkan).
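# Contoh pembacaan elastisitas (sketsa dengan angka rekaan, bukan hasil model):
# kenaikan X sebesar 1% mengalikan Y dengan faktor 1.01^Beta (aproksimasinya ~Beta%).
beta_contoh <- 0.35
efek_1persen <- c(aproksimasi_persen = beta_contoh,
eksak_persen = (1.01^beta_contoh - 1) * 100) # keduanya sekitar 0.35%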
# A. Identifikasi Kolom Koefisien
# Nama kolom biasanya: Intercept, t_X1, t_X2, dst.
coef_vars <- c("Intercept", paste0("t_", pred_use))
# Bersihkan nama kolom dari karakter aneh (misal kurung)
colnames(df_gtwr) <- gsub("\\(|\\)", "", colnames(df_gtwr))
coef_vars_clean <- gsub("\\(|\\)", "", coef_vars)
# Pastikan hanya memplot variabel yang ada di hasil model
present_vars <- coef_vars_clean[coef_vars_clean %in% colnames(df_gtwr)]
# B. Fungsi Plotting Otomatis
plot_coef_map <- function(var_beta, tahun = target_year) {
# Filter Data Tahun Target & Agregasi per Provinsi
dat_beta <- df_gtwr %>%
filter(Tahun == tahun) %>%
group_by(PROV) %>%
summarise(beta = mean(.data[[var_beta]], na.rm=TRUE), .groups="drop")
# Join ke Peta
map_beta <- map_sumatera_base %>%
left_join(dat_beta, by = c("PROV_MATCH" = "PROV"))
# Judul Variabel yang lebih enak dibaca
judul_var <- gsub("t_", "", var_beta) # Hapus prefix 't_'
# Plot
p <- ggplot(map_beta) +
geom_sf(aes(fill = beta), color = "white", linewidth = 0.4) +
# Label Provinsi
geom_text(data = label_df, aes(x = lon, y = lat, label = label),
inherit.aes = FALSE, color = "black", size = 3,
fontface = "bold", check_overlap = TRUE) +
# Skala Warna Divergen (Merah = Positif, Biru = Negatif)
scale_fill_gradient2(
low = "#2166ac", mid = "#f7f7f7", high = "#b2182b",
midpoint = 0, name = "Nilai Beta"
) +
labs(
title = paste0("Peta Koefisien Lokal — Variabel ", judul_var),
subtitle = paste0("Tahun: ", tahun, " (Merah = Pengaruh Positif, Biru = Pengaruh Negatif)"),
x = NULL, y = NULL
) +
theme_minimal(base_size = 11) +
theme(
plot.title = element_text(face = "bold"),
panel.grid = element_blank(),
axis.text = element_blank()
)
return(p)
}
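# Contoh pemakaian tunggal (opsional, di luar loop di bawah), misal untuk variabel t_X1:
# print(plot_coef_map("t_X1", tahun = target_year))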
# C. Loop Cetak Semua Peta Koefisien
cat("--- Mencetak Peta Koefisien Lokal ---\n")
## --- Mencetak Peta Koefisien Lokal ---
for (vb in present_vars) {
p_beta <- plot_coef_map(vb, tahun = target_year)
print(p_beta)
if (SAVE_VIS) {
safe_name <- gsub("[^A-Za-z0-9_]+", "_", vb)
ggsave(sprintf("GTWR_Beta_%s_%d.png", safe_name, target_year),
p_beta, width=10, height=8, dpi=300)
}
}
# Mencari kolom R2 (biasanya bernama 'localR2' atau 'Local_R2')
localR2_col <- names(df_gtwr)[grep("local.*R2", names(df_gtwr), ignore.case = TRUE)][1]
if (!is.na(localR2_col)) {
dat_r2 <- df_gtwr %>%
filter(Tahun == target_year) %>%
group_by(PROV) %>%
summarise(localR2 = mean(.data[[localR2_col]], na.rm=TRUE), .groups="drop")
map_r2 <- map_sumatera_base %>%
left_join(dat_r2, by = c("PROV_MATCH" = "PROV"))
p_r2 <- ggplot(map_r2) +
geom_sf(aes(fill = localR2), color = "white", linewidth = 0.4) +
geom_text(data = label_df, aes(x = lon, y = lat, label = label),
inherit.aes = FALSE, color = "black", size = 3,
fontface = "bold", check_overlap = TRUE) +
scale_fill_viridis_c(option = "cividis", name = "Local R²", na.value = "grey90") +
labs(
title = paste0("Kebaikan Model (Local R²) — Tahun ", target_year),
subtitle = "Semakin tinggi (kuning) = Model semakin akurat menjelaskan variasi di wilayah tersebut",
x = NULL, y = NULL
) +
theme_minimal(base_size = 11) +
theme(plot.title = element_text(face = "bold"), panel.grid = element_blank(),
axis.text = element_blank())
print(p_r2)
if (SAVE_VIS) ggsave(sprintf("GTWR_Local_R2_%d.png", target_year), p_r2, width=10, height=8, dpi=300)
}
# Residual = Aktual - Prediksi
# Positif (+) = Aktual lebih tinggi dari prediksi (Underprediction model)
# Negatif (-) = Aktual lebih rendah dari prediksi (Overprediction model)
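# Sketsa kecil (angka ilustratif, bukan hasil model) untuk membaca tanda residual:
contoh_resid <- data.frame(Y_act = c(120, 30), Y_pred = c(100, 45))
contoh_resid$resid <- contoh_resid$Y_act - contoh_resid$Y_pred # +20 = under, -15 = over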
res_year <- df_gtwr %>%
filter(Tahun == target_year) %>%
mutate(
resid_count = Y_act - Y_pred # Hitung selisih Count
) %>%
group_by(PROV) %>%
summarise(resid_count = sum(resid_count, na.rm=TRUE), .groups="drop")
map_res <- map_sumatera_base %>%
left_join(res_year, by = c("PROV_MATCH" = "PROV"))
p_resid <- ggplot(map_res) +
geom_sf(aes(fill = resid_count), color = "white", linewidth = 0.4) +
geom_text(data = label_df, aes(x = lon, y = lat, label = label),
inherit.aes = FALSE, color = "black", size = 3,
fontface = "bold", check_overlap = TRUE) +
# Skala Warna Divergen (Putih di 0)
scale_fill_gradient2(
low = "#d73027", mid = "white", high = "#4575b4", # Merah (Negatif) - Biru (Positif)
name = "Error (Jml)",
midpoint = 0,
labels = scales::comma
) +
labs(
title = paste0("Peta Residual (Error Prediksi) — Tahun ", target_year),
subtitle = "Biru = Kejadian > Prediksi (Under) | Merah = Kejadian < Prediksi (Over)",
x = NULL, y = NULL
) +
theme_minimal(base_size = 11) +
theme(plot.title = element_text(face = "bold"), panel.grid = element_blank(),
axis.text = element_blank())
print(p_resid)

if (SAVE_VIS) ggsave(sprintf("GTWR_Residual_Error_Count_%d.png", target_year), p_resid, width=10, height=8, dpi=300)
cat("\n--- Semua Proses Visualisasi Selesai ---\n")
##
## --- Semua Proses Visualisasi Selesai ---
# ==============================================================================
# BAGIAN 6: FORECASTING 2025-2030 (DATA COUNT)
# Metode: Forecast Variabel X (ETS/ARIMA) + Freeze Spatial Beta (GTWR Terakhir)
# ==============================================================================
future_years <- 2025:2030
# Pastikan list_sumatera ada (dari langkah awal)
prov_use <- list_sumatera
pred_vars <- paste0("X", 1:7)
last_year <- target_year # Menggunakan tahun terakhir sebagai basis Beta
# Hitung ulang Shift Factor agar transformasi masa depan SAMA PERSIS dengan training
shift_factors <- list()
for (v in pred_vars) {
vals <- df_mod[[v]] # Data asli
min_val <- min(vals, na.rm = TRUE)
if (min_val <= 0) {
shift_factors[[v]] <- abs(min_val) + 1
} else {
shift_factors[[v]] <- 0
}
}
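# Sketsa verifikasi transformasi log-shift (vektor ilustratif, bukan data asli):
# shift = |min| + 1 menjamin argumen log >= 1 dan transformasinya bisa dibalik persis.
contoh_x <- c(-2.5, 0, 4.1)
shift_c <- abs(min(contoh_x)) + 1 # 3.5
log_x <- log(contoh_x + shift_c) # semua finite
stopifnot(isTRUE(all.equal(exp(log_x) - shift_c, contoh_x)))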
# Data Historis untuk Time Series Forecasting
df_hist <- df_mod %>%
mutate(PROV = toupper(Provinsi)) %>%
filter(PROV %in% prov_use) %>%
select(PROV, Tahun, all_of(pred_vars)) %>%
arrange(PROV, Tahun)
# Fungsi Forecast (ETS sebagai model utama; jika ETS gagal, fallback ke auto.arima)
fc_1d <- function(x, h) {
x <- as.numeric(x); x <- x[is.finite(x)]
if (length(na.omit(x)) < 3) return(rep(NA_real_, h))
fit <- tryCatch(ets(ts(x, frequency=1)), error=function(e) NULL)
if (is.null(fit)) fit <- auto.arima(ts(x, frequency=1))
f <- forecast(fit, h=h)$mean
return(as.numeric(f))
}
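# Contoh pemakaian fc_1d pada deret pendek ilustratif (bukan data asli):
contoh_fc <- fc_1d(c(10, 12, 15, 14, 18, 20, 22), h = length(future_years))
stopifnot(length(contoh_fc) == length(future_years)) # satu ramalan per tahun 2025-2030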
cat("Melakukan peramalan variabel X untuk 2025-2030...\n")
## Melakukan peramalan variabel X untuk 2025-2030...
# Loop per Provinsi untuk memprediksi X masa depan
df_future_X <- map_dfr(prov_use, function(loc) {
rows_cc <- df_hist %>% filter(PROV == loc) %>% arrange(Tahun)
out <- tibble(PROV = loc, Tahun = future_years)
for (vx in pred_vars) {
fc_val <- fc_1d(rows_cc[[vx]], length(future_years))
# Penanganan khusus: Variabel fisik tidak boleh negatif (kecuali X1 Deforestasi Netto)
if (vx != "X1") { fc_val <- pmax(fc_val, 0) }
out[[vx]] <- fc_val
}
return(out)
})
for (vx in pred_vars) {
tx_name <- paste0("t_", vx)
shift <- shift_factors[[vx]]
# Terapkan shift lalu log
df_future_X[[tx_name]] <- log(df_future_X[[vx]] + shift)
}
# Menggunakan df_gtwr dari langkah visualisasi sebelumnya
# Pastikan nama kolom bersih dari karakter aneh
names(df_gtwr) <- gsub("\\(|\\)", "", names(df_gtwr))
# Cari nama kolom Intercept & koefisien beta murni (t_X1..t_X7, tanpa kolom _SE / _TV)
intercept_col <- names(df_gtwr)[grepl("Intercept", names(df_gtwr), ignore.case = TRUE)][1]
beta_cols <- names(df_gtwr)[grepl("^t_X[0-9]+$", names(df_gtwr))]
# Ambil Beta tahun terakhir
betas_last <- df_gtwr %>%
mutate(PROV = toupper(Provinsi)) %>%
filter(Tahun == last_year, PROV %in% prov_use) %>%
select(PROV, all_of(intercept_col), all_of(beta_cols)) %>%
rename(b0 = !!intercept_col) %>%
# Jika ada duplikat (misal 1 provinsi ada banyak titik), ambil rata-ratanya
group_by(PROV) %>%
summarise(across(everything(), \(x) mean(x, na.rm = TRUE)), .groups = "drop")
# Rumus: Y_hat = exp( b0 + b1*t_X1 + ... + b7*t_X7 ) - 1, dengan t_Xj = log(Xj + shift_j)
pred_freeze_beta_count <- function(df_fut_one_prov, betas_one_prov) {
if (nrow(betas_one_prov) == 0) return(rep(NA_real_, nrow(df_fut_one_prov)))
# Ambil Intercept (b0)
b0 <- betas_one_prov$b0[1]
# Hitung Linear Predictor (Log Scale)
linear_pred <- rep(b0, nrow(df_fut_one_prov))
for (v in pred_vars) {
tx_name <- paste0("t_", v) # Nama kolom data (misal t_X1)
# Cari kolom beta yang namanya mengandung t_X1
beta_col <- names(betas_one_prov)[grepl(paste0(tx_name, "$"), names(betas_one_prov))][1]
if (!is.na(beta_col)) {
b_val <- betas_one_prov[[beta_col]]
x_val <- df_fut_one_prov[[tx_name]]
linear_pred <- linear_pred + (x_val * b_val)
}
}
# Kembalikan dari skala log ke count: exp(linear_pred) - 1
y_count <- exp(linear_pred) - 1
# Pastikan tidak ada nilai negatif (secara teori exp selalu positif, tapi -1 bisa membuatnya negatif kecil)
y_count <- pmax(y_count, 0)
return(y_count)
}
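# Sketsa numerik rumus di atas (angka rekaan, bukan hasil model): satu provinsi, satu tahun.
# Misal b0 = -1.2 dan total kontribusi sum(b_j * t_X_j) = 5.9:
contoh_linpred <- -1.2 + 5.9
contoh_yhat <- max(exp(contoh_linpred) - 1, 0) # exp(4.7) - 1, sekitar 109 kejadian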
# Jalankan perhitungan
df_pred_final <- df_future_X %>%
group_by(PROV) %>%
group_modify(~{
b_row <- betas_last %>% filter(PROV == .y$PROV)
tibble(Tahun = .x$Tahun,
Y_pred_count = pred_freeze_beta_count(.x, b_row)) # Hasil Count
}) %>%
ungroup() %>%
mutate(Method = paste0("GTWR_FreezeBeta_", last_year))
# Tambahkan Koordinat untuk Peta
coords_sumatera <- df_mod %>%
mutate(PROV = toupper(Provinsi)) %>%
distinct(PROV, lon, lat)
df_pred_final <- df_pred_final %>%
left_join(coords_sumatera, by="PROV")
cat("\n=== CONTOH HASIL PREDIKSI (2025-2030) - COUNT DATA ===\n")
##
## === CONTOH HASIL PREDIKSI (2025-2030) - COUNT DATA ===
print(head(df_pred_final, 10))
## # A tibble: 10 × 6
## PROV Tahun Y_pred_count Method lon lat
## <chr> <int> <dbl> <chr> <dbl> <dbl>
## 1 ACEH 2025 102. GTWR_FreezeBeta_2024 97.0 4.37
## 2 ACEH 2026 102. GTWR_FreezeBeta_2024 97.0 4.37
## 3 ACEH 2027 102. GTWR_FreezeBeta_2024 97.0 4.37
## 4 ACEH 2028 102. GTWR_FreezeBeta_2024 97.0 4.37
## 5 ACEH 2029 102. GTWR_FreezeBeta_2024 97.0 4.37
## 6 ACEH 2030 102. GTWR_FreezeBeta_2024 97.0 4.37
## 7 BENGKULU 2025 17.0 GTWR_FreezeBeta_2024 103. -3.52
## 8 BENGKULU 2026 17.0 GTWR_FreezeBeta_2024 103. -3.52
## 9 BENGKULU 2027 17.0 GTWR_FreezeBeta_2024 103. -3.52
## 10 BENGKULU 2028 17.0 GTWR_FreezeBeta_2024 103. -3.52
# Simpan CSV
file_name <- "Hasil_Forecast_Banjir_Sumatera_2025_2030_Count.csv"
write.csv(df_pred_final, file_name, row.names = FALSE)
cat(paste("\nFile berhasil disimpan:", file_name, "\n"))
##
## File berhasil disimpan: Hasil_Forecast_Banjir_Sumatera_2025_2030_Count.csv
# A. Siapkan Data Historis (df_mod)
hist_plot <- df_mod %>%
mutate(PROV = toupper(Provinsi),
Type = "Data Historis (Aktual)") %>%
select(PROV, Tahun, Y, Type) %>%
rename(Jumlah_Banjir = Y)
# B. Siapkan Data Forecast (df_pred_final)
# Pastikan menggunakan kolom Y_pred_count (bukan persen)
fut_plot <- df_pred_final %>%
mutate(Type = "Proyeksi (2025-2030)") %>%
select(PROV, Tahun, Y_pred_count, Type) %>%
rename(Jumlah_Banjir = Y_pred_count)
# C. Gabungkan Kedua Data
combined_plot <- bind_rows(hist_plot, fut_plot)
# D. Buat Plot Line Chart (Facet per Provinsi)
p_proyeksi <- ggplot(combined_plot, aes(x = Tahun, y = Jumlah_Banjir, color = Type)) +
# Garis dan Titik
geom_line(linewidth = 1) +
geom_point(size = 1.5) +
# Garis putus-putus vertikal penanda batas data asli & prediksi
geom_vline(xintercept = last_year, linetype = "dashed", color = "gray50") +
# Facet: Pisahkan grafik per Provinsi agar tidak tumpang tindih
facet_wrap(~ PROV, scales = "free_y", ncol = 3) +
# Pewarnaan
scale_color_manual(values = c("Data Historis (Aktual)" = "#333333",
"Proyeksi (2025-2030)" = "#E41A1C")) +
# Format Sumbu Y (Angka biasa, bukan sains)
scale_y_continuous(labels = scales::comma) +
# Label dan Judul
labs(
title = "Proyeksi Kejadian Banjir Pulau Sumatera (2025-2030)",
subtitle = paste0("Metode: GTWR Spatial Freeze (Basis Beta Th. ", last_year, ") + Forecast Variabel X"),
y = "Jumlah Kejadian Banjir",
x = NULL,
color = "Keterangan"
) +
# Tema Visual
theme_minimal(base_size = 11) +
theme(
plot.title = element_text(face = "bold", size = 14),
strip.text = element_text(face = "bold", size = 10), # Judul per kotak provinsi
legend.position = "top",
panel.border = element_rect(color = "grey80", fill = NA), # Kotak pembatas grafis
axis.text.x = element_text(angle = 45, hjust = 1)
)
# E. Tampilkan dan Simpan
print(p_proyeksi)

# Simpan Grafik High Resolution
ggsave("Grafik_Proyeksi_Banjir_Sumatera_2025_2030.png",
p_proyeksi, width = 14, height = 10, dpi = 300)
cat("Visualisasi selesai. Grafik disimpan sebagai 'Grafik_Proyeksi_Banjir_Sumatera_2025_2030.png'\n")
## Visualisasi selesai. Grafik disimpan sebagai 'Grafik_Proyeksi_Banjir_Sumatera_2025_2030.png'
# ==============================================================================
# BAGIAN 7: VISUALISASI HASIL PREDIKSI 2030 & UJI AUTOKORELASI SPASIAL
# ==============================================================================
# --- 1. Persiapan Data Forecast 2030 ---
# Ambil hasil prediksi tahun 2030 dari langkah sebelumnya (df_pred_final)
# Pastikan menggunakan kolom Y_pred_count (Count Data)
pred_2030 <- df_pred_final %>%
filter(Tahun == 2030) %>%
select(PROV, Y_pred_count) %>%
mutate(Y_pred_count = as.numeric(Y_pred_count))
# --- 2. PETA KOROPLET (PREDIKSI 2030) ---
# Gabungkan data prediksi dengan Peta Dasar Sumatera
map_pred2030 <- map_sumatera_base %>%
left_join(pred_2030, by = c("PROV_MATCH" = "PROV"))
p_map2030 <- ggplot(map_pred2030) +
# Layer Peta
geom_sf(aes(fill = Y_pred_count), color = "white", linewidth = 0.4) +
# Layer Label (Nama Provinsi)
geom_text(
data = label_df,
aes(x = lon, y = lat, label = label),
inherit.aes = FALSE, color = "black", size = 3, fontface = "bold", check_overlap = TRUE
) +
# Skala Warna (Count Data)
scale_fill_viridis_c(
option = "plasma",
na.value = "grey90",
name = "Est. Jumlah",
labels = scales::comma # Format angka ribuan (bukan desimal persen)
) +
# Judul & Tampilan
labs(
title = "Peta Prediksi Kejadian Banjir — Tahun 2030",
subtitle = "Wilayah: Pulau Sumatera • Metode: GTWR Forecast (Freeze Beta)",
x = NULL, y = NULL
) +
theme_minimal(base_size = 11) +
theme(plot.title = element_text(face = "bold"),
panel.grid = element_blank(),
axis.text = element_blank())
print(p_map2030)

if (SAVE_VIS) ggsave("Peta_Prediksi_Banjir_2030_Sumatera_Count.png", p_map2030, width = 10, height = 8, dpi = 300)
# --- 3. BAR CHART (PERINGKAT RISIKO BANJIR 2030) ---
# Urutkan provinsi dari prediksi banjir tertinggi ke terendah
p_bar2030 <- pred_2030 %>%
mutate(PROV = reorder(PROV, Y_pred_count)) %>%
ggplot(aes(x = PROV, y = Y_pred_count, fill = Y_pred_count)) +
geom_col(width = 0.7, color = "black", linewidth = 0.4) +
coord_flip() + # Memutar grafik jadi horizontal
# Format Sumbu Y (Angka Bulat)
scale_y_continuous(labels = scales::comma) +
scale_fill_viridis_c(option = "plasma", name = "Jumlah") +
labs(
title = "Peringkat Risiko Banjir per Provinsi (Tahun 2030)",
subtitle = "Estimasi Jumlah Kejadian (Count)",
x = NULL, y = "Jumlah Kejadian Banjir"
) +
theme_minimal(base_size = 12) +
theme(
plot.title = element_text(face = "bold"),
legend.position = "right",
panel.grid.minor = element_blank()
)
print(p_bar2030)

if (SAVE_VIS) ggsave("BarChart_Prediksi_Banjir_2030_Count.png", p_bar2030, width = 8, height = 6, dpi = 300)
# --- 4. MORAN'S I (UJI CLUSTER SPASIAL PADA RESIDUAL & KOEFISIEN) ---
# Menguji apakah error model atau pengaruh variabel mengelompok secara spasial?
cat("\n--- ANALISIS AUTOKORELASI SPASIAL (MORAN'S I) ---\n")
##
## --- ANALISIS AUTOKORELASI SPASIAL (MORAN'S I) ---
# A. Siapkan Data (Gunakan df_gtwr dari Bagian 5)
# Kita hitung residual count dulu jika belum ada
target_data_moran <- df_gtwr %>%
filter(Tahun == target_year) %>%
mutate(
PROV = toupper(Provinsi),
resid_count = Y_act - Y_pred # Pastikan kolom ini ada (Actual - Pred)
) %>%
arrange(PROV) %>% # PENTING: Urutkan abjad agar sinkron dengan matriks tetangga
# Pastikan hanya 1 baris per provinsi (handle jika ada duplikat)
distinct(PROV, .keep_all = TRUE)
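# Pemeriksaan cepat (asumsi: satu titik per provinsi per tahun pada data GTWR):
stopifnot(nrow(target_data_moran) == length(prov_use))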
# B. Membuat Matriks Pembobot (Spatial Weights)
# Menggunakan K-Nearest Neighbors (k=3 cukup untuk 10 provinsi Sumatera)
coords_matrix <- cbind(target_data_moran$lon, target_data_moran$lat)
knn <- spdep::knearneigh(coords_matrix, k=3)
nb <- spdep::knn2nb(knn)
lw <- spdep::nb2listw(nb, style="W") # Row-standardized weights
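# Sketsa pemeriksaan bobot (objek 'nb' dan 'lw' dari langkah di atas): dengan k = 3 dan
# style = "W", setiap provinsi punya 3 tetangga berbobot 1/3 sehingga total bobot per baris = 1.
stopifnot(all(spdep::card(nb) == 3))
stopifnot(all(abs(sapply(lw$weights, sum) - 1) < 1e-12))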
# C. Fungsi Uji Moran
run_moran <- function(var_name, data_vec) {
test <- moran.test(data_vec, lw)
cat(paste0("\n>>> Moran's I untuk variabel: ", var_name, "\n"))
print(test)
# Interpretasi Singkat
pval <- test$p.value
if(pval < 0.05) {
cat(" [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).\n")
} else {
cat(" [OK] Pola Spasial Acak (Tidak ada clustering signifikan).\n")
}
}
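# Sketsa manual (opsional) untuk memverifikasi statistik Moran's I:
# I = (n / S0) * sum_ij w_ij (x_i - xbar)(x_j - xbar) / sum_i (x_i - xbar)^2
moran_manual <- function(x, listw) {
W <- spdep::listw2mat(listw) # matriks bobot (baris terstandarisasi)
z <- x - mean(x)
(length(x) / sum(W)) * sum(W * outer(z, z)) / sum(z^2)
}
# Hasilnya seharusnya sama dengan 'Moran I statistic' keluaran moran.test di bawah, misal:
# moran_manual(target_data_moran$resid_count, lw)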
# D. Eksekusi Uji
# 1. Uji Residual (Penting untuk asumsi model)
run_moran("Residual (Error Count)", target_data_moran$resid_count)
##
## >>> Moran's I untuk variabel: Residual (Error Count)
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = -0.05336, p-value = 0.5213
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## -0.12047927 -0.11111111 0.03082357
##
## [OK] Pola Spasial Acak (Tidak ada clustering signifikan).
# 2. Uji Koefisien (Opsional: Melihat pola penyebaran pengaruh variabel)
# Cek nama kolom koefisien yang tersedia (misal t_X1, t_X2...)
coef_cols <- names(target_data_moran)[grepl("t_X", names(target_data_moran))]
for(v in coef_cols) {
run_moran(paste0("Koefisien ", v), target_data_moran[[v]])
}
##
## >>> Moran's I untuk variabel: Koefisien t_X1
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 3.4223, p-value = 0.0003104
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.46651879 -0.11111111 0.02848732
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X2
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 2.9583, p-value = 0.001547
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.44489411 -0.11111111 0.03532436
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X3
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 4.1968, p-value = 1.354e-05
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.66940831 -0.11111111 0.03458875
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X4
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 2.4178, p-value = 0.007807
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.33742821 -0.11111111 0.03441524
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X5
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 4.0117, p-value = 3.014e-05
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.64901477 -0.11111111 0.03590155
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X6
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 3.6076, p-value = 0.0001545
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.56061776 -0.11111111 0.03467006
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X7
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 3.1505, p-value = 0.0008149
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.46643026 -0.11111111 0.03360499
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X1_SE
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 3.963, p-value = 3.701e-05
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.59339722 -0.11111111 0.03160296
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X2_SE
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 4.0204, p-value = 2.906e-05
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.64099855 -0.11111111 0.03499719
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X3_SE
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 3.2523, p-value = 0.0005724
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.49757517 -0.11111111 0.03502752
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X4_SE
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 0.69099, p-value = 0.2448
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.01290561 -0.11111111 0.03221150
##
## [OK] Pola Spasial Acak (Tidak ada clustering signifikan).
##
## >>> Moran's I untuk variabel: Koefisien t_X5_SE
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 2.7387, p-value = 0.003084
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.39703720 -0.11111111 0.03442722
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X6_SE
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 2.0578, p-value = 0.0198
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.27973291 -0.11111111 0.03607285
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X7_SE
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 2.1727, p-value = 0.0149
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.3016983 -0.1111111 0.0361006
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X1_TV
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 3.6368, p-value = 0.000138
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.56195172 -0.11111111 0.03425005
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X2_TV
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 3.7344, p-value = 9.409e-05
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.60574646 -0.11111111 0.03684929
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X3_TV
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 4.2611, p-value = 1.017e-05
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.68675562 -0.11111111 0.03505971
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X4_TV
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 2.1119, p-value = 0.01735
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.2379998 -0.1111111 0.0273256
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X5_TV
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 4.298, p-value = 8.619e-06
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.71106618 -0.11111111 0.03659356
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X6_TV
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 3.338, p-value = 0.0004219
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.50398027 -0.11111111 0.03395447
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
##
## >>> Moran's I untuk variabel: Koefisien t_X7_TV
##
## Moran I test under randomisation
##
## data: data_vec
## weights: lw
##
## Moran I statistic standard deviate = 2.9421, p-value = 0.00163
## alternative hypothesis: greater
## sample estimates:
## Moran I statistic Expectation Variance
## 0.42241324 -0.11111111 0.03288414
##
## [!] TERDAPAT Autokorelasi Spasial Signifikan (Data Mengelompok).
cat("\n--- Seluruh Proses Selesai ---\n")
##
## --- Seluruh Proses Selesai ---
# ============================================================
# BAGIAN 8: UJI MORAN'S I (OTOMATIS SEMUA VARIABEL)
# & VISUALISASI EVOLUSI BETA TAHUNAN
# ============================================================
# --- A. UJI MORAN'S I OTOMATIS (SEMUA VARIABEL X) ---
# 1. Deteksi Variabel Beta secara Otomatis
# Mencari kolom yang berawalan "t_X" di data target (tahun terakhir)
vars_to_test <- names(target_data_moran)[grepl("t_X", names(target_data_moran))]
# Hapus kolom yang bukan koefisien murni (misal: Standard Error / T-Value)
vars_to_test <- vars_to_test[!grepl("_SE$|_TV$", vars_to_test)]
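# Sketsa kecil perilaku filter di atas (nama kolom rekaan):
contoh_kolom <- c("t_X1", "t_X1_SE", "t_X1_TV", "t_X2")
contoh_pilih <- contoh_kolom[grepl("t_X", contoh_kolom) & !grepl("_SE$|_TV$", contoh_kolom)]
stopifnot(identical(contoh_pilih, c("t_X1", "t_X2")))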
cat("\nVariabel yang akan diuji Moran's I:", paste(vars_to_test, collapse=", "), "\n")
##
## Variabel yang akan diuji Moran's I: t_X1, t_X2, t_X3, t_X4, t_X5, t_X6, t_X7
# Siapkan wadah hasil
results_list <- list()
cat("\n--- MULAI PENGUJIAN MORAN'S I (LOOPING) ---\n")
##
## --- MULAI PENGUJIAN MORAN'S I (LOOPING) ---
# 2. Loop Pengujian
for (coef_name in vars_to_test) {
# Ambil data Beta
x_beta <- target_data_moran[[coef_name]]
# Pastikan data numerik dan tidak NA
if(all(is.na(x_beta))) {
cat(sprintf("Skip %s (Data Kosong)\n", coef_name))
next
}
# Jalankan Tes Moran's I
# (Pastikan objek 'lw' sudah ada dari langkah sebelumnya)
moran_result <- spdep::moran.test(x_beta, lw, alternative="two.sided")
# Ambil Statistik
moran_stat <- moran_result$estimate[["Moran I statistic"]]
pval <- moran_result$p.value
# Interpretasi
kesimpulan <- ifelse(pval < 0.05, "Signifikan (Cluster)", "Acak (Random)")
# Print Status
cat(sprintf("Var: %-10s | Moran I: %6.4f | P-val: %.5f | -> %s\n",
coef_name, moran_stat, pval, kesimpulan))
# Simpan ke List
results_list[[coef_name]] <- data.frame(
Variabel = coef_name,
Moran_I = moran_stat,
P_Value = pval,
Signifikansi = kesimpulan,
row.names = NULL
)
# Simpan Plot Moran (Scatter Plot)
if (SAVE_VIS) {
safe_name <- gsub("[^A-Za-z0-9_]+", "_", coef_name)
png_filename <- sprintf("MoranPlot_%s_%d.png", safe_name, target_year)
png(png_filename, width=800, height=600)
# Label singkatan provinsi untuk plot
label_moran <- label_df$label[match(target_data_moran$PROV, toupper(label_df$Provinsi))]
spdep::moran.plot(x_beta, lw,
labels = label_moran,
pch = 19, cex = 1.0, col = "red",
main = paste0("Moran’s I Plot: ", coef_name, " (Tahun ", target_year, ")"),
xlab = paste("Nilai Beta", coef_name, "(Log-Scale)"),
ylab = "Spatially Lagged Beta (Rata-rata Tetangga)")
dev.off()
}
}
## Var: t_X1 | Moran I: 0.4665 | P-val: 0.00062 | -> Signifikan (Cluster)
## Var: t_X2 | Moran I: 0.4449 | P-val: 0.00309 | -> Signifikan (Cluster)
## Var: t_X3 | Moran I: 0.6694 | P-val: 0.00003 | -> Signifikan (Cluster)
## Var: t_X4 | Moran I: 0.3374 | P-val: 0.01561 | -> Signifikan (Cluster)
## Var: t_X5 | Moran I: 0.6490 | P-val: 0.00006 | -> Signifikan (Cluster)
## Var: t_X6 | Moran I: 0.5606 | P-val: 0.00031 | -> Signifikan (Cluster)
## Var: t_X7 | Moran I: 0.4664 | P-val: 0.00163 | -> Signifikan (Cluster)
# 3. Rekapitulasi Hasil
final_moran_table <- do.call(rbind, results_list)
# Rapihkan Angka
final_moran_table$Moran_I <- round(final_moran_table$Moran_I, 4)
final_moran_table$P_Value <- round(final_moran_table$P_Value, 5)
cat("\n=======================================================\n")
##
## =======================================================
cat(" TABEL RINGKASAN AUTOKORELASI SPASIAL (BETA) \n")
## TABEL RINGKASAN AUTOKORELASI SPASIAL (BETA)
cat("=======================================================\n")
## =======================================================
print(final_moran_table)
## Variabel Moran_I P_Value Signifikansi
## t_X1 t_X1 0.4665 0.00062 Signifikan (Cluster)
## t_X2 t_X2 0.4449 0.00309 Signifikan (Cluster)
## t_X3 t_X3 0.6694 0.00003 Signifikan (Cluster)
## t_X4 t_X4 0.3374 0.01561 Signifikan (Cluster)
## t_X5 t_X5 0.6490 0.00006 Signifikan (Cluster)
## t_X6 t_X6 0.5606 0.00031 Signifikan (Cluster)
## t_X7 t_X7 0.4664 0.00163 Signifikan (Cluster)
if (SAVE_DATA) {
write.csv(final_moran_table, "Hasil_Uji_Moran_Lengkap.csv", row.names = FALSE)
cat("\nFile tersimpan: 'Hasil_Uji_Moran_Lengkap.csv'\n")
}
##
## File tersimpan: 'Hasil_Uji_Moran_Lengkap.csv'
# --- B. VISUALISASI EVOLUSI BETA PER TAHUN (TIME-SERIES MAPS) ---
# Membuat peta per tahun untuk setiap variabel X
cat("\n--- MEMULAI PEMBUATAN PETA TREN BETA (PER TAHUN) ---\n")
##
## --- MEMULAI PEMBUATAN PETA TREN BETA (PER TAHUN) ---
years_seq <- sort(unique(df_gtwr$Tahun))
# Gunakan daftar variabel yang sama dengan di atas
coef_vars <- vars_to_test
for (vb in coef_vars) {
cat(paste0(">>> Memproses Peta Variabel: ", vb, " ...\n"))
for (yy in years_seq) {
# 1. Siapkan Data per Tahun
dat_beta <- df_gtwr %>%
filter(Tahun == yy) %>%
mutate(PROV = toupper(Provinsi)) %>%
group_by(PROV) %>%
summarise(beta = mean(.data[[vb]], na.rm=TRUE), .groups="drop")
# 2. Join ke Peta Dasar
map_beta <- map_sumatera_base %>%
left_join(dat_beta, by = c("PROV_MATCH" = "PROV"))
# 3. Plotting
# Catatan: yang diplot adalah nilai Beta asli (skala log).
# Karena X juga ditransformasi log, Beta dibaca sebagai elastisitas
# (kira-kira % perubahan Y per 1% perubahan X); Beta asli dipertahankan agar akurat.
p_beta_year <- ggplot(map_beta) +
geom_sf(aes(fill = beta), color = "white", linewidth = 0.3) +
geom_text(data = label_df, aes(x = lon, y = lat, label = label),
inherit.aes = FALSE, color = "black", size = 2.5,
fontface = "bold", check_overlap = TRUE) +
scale_fill_viridis_c(
option = "viridis",
name = "Nilai Beta", # Label netral
na.value = "grey90"
) +
labs(
title = paste0("Sebaran Koefisien GTWR: ", vb),
subtitle = paste0("Tahun: ", yy, " • Satuan: Log-Risk (Count Model)"),
x = NULL, y = NULL
) +
theme_minimal(base_size = 10) +
theme(
plot.title = element_text(face = "bold"),
panel.grid = element_blank(),
axis.text = element_blank()
)
# 4. Simpan
if (SAVE_VIS) {
safe_var_name <- gsub("[^A-Za-z0-9_]+", "_", vb)
filename <- sprintf("Peta_Beta_%s_%d.png", safe_var_name, yy)
# Suppress message agar console tidak penuh
suppressMessages(
ggsave(filename, p_beta_year, width = 8, height = 6, dpi = 200)
)
}
}
cat(paste(" -> Selesai tahun", min(years_seq), "s/d", max(years_seq), "\n"))
}
## >>> Memproses Peta Variabel: t_X1 ...
## -> Selesai tahun 2018 s/d 2024
## >>> Memproses Peta Variabel: t_X2 ...
## -> Selesai tahun 2018 s/d 2024
## >>> Memproses Peta Variabel: t_X3 ...
## -> Selesai tahun 2018 s/d 2024
## >>> Memproses Peta Variabel: t_X4 ...
## -> Selesai tahun 2018 s/d 2024
## >>> Memproses Peta Variabel: t_X5 ...
## -> Selesai tahun 2018 s/d 2024
## >>> Memproses Peta Variabel: t_X6 ...
## -> Selesai tahun 2018 s/d 2024
## >>> Memproses Peta Variabel: t_X7 ...
## -> Selesai tahun 2018 s/d 2024
cat("\n--- MEMULAI PEMBUATAN PETA TREN BETA (TIME-SERIES) ---\n")
##
## --- MEMULAI PEMBUATAN PETA TREN BETA (TIME-SERIES) ---
# 1. Persiapan Awal
# Ambil daftar tahun yang tersedia di data
years_seq <- sort(unique(df_gtwr$Tahun))
# Tentukan variabel yang akan di-plot secara otomatis
# Mencari kolom yang berawalan "t_X" di data frame hasil GTWR
coef_vars <- names(df_gtwr)[grepl("t_X", names(df_gtwr))]
# Hapus jika ada kolom suffix asing (misal _SE untuk standard error, _TV untuk t-value)
coef_vars <- coef_vars[!grepl("_SE$|_TV$", coef_vars)]
cat("Variabel yang akan diproses:", paste(coef_vars, collapse=", "), "\n")
## Variabel yang akan diproses: t_X1, t_X2, t_X3, t_X4, t_X5, t_X6, t_X7
cat("Rentang Tahun:", min(years_seq), "s/d", max(years_seq), "\n")
## Rentang Tahun: 2018 s/d 2024
cat("Total gambar yang akan dibuat:", length(coef_vars) * length(years_seq), "\n\n")
## Total gambar yang akan dibuat: 49
# 2. Loop Utama (Iterasi per Variabel)
for (vb in coef_vars) {
# Buat nama variabel yang aman untuk nama file (hapus karakter aneh jika ada)
safe_var_name <- gsub("[^A-Za-z0-9_]+", "_", vb)
cat(paste0(">>> Memproses Peta untuk Variabel: ", vb, " <<<\n"))
# 3. Loop Dalam (Iterasi per Tahun)
for (yy in years_seq) {
# A. Siapkan Data per Tahun
# Filter data tahun 'yy', standarisasi nama provinsi, lalu ambil nilai Beta-nya
dat_beta <- df_gtwr %>%
filter(Tahun == yy) %>%
mutate(PROV = toupper(Provinsi)) %>%
# Group by untuk memastikan 1 nilai per provinsi (mengatasi jika ada duplikat di input)
group_by(PROV) %>%
summarise(beta = mean(.data[[vb]], na.rm=TRUE), .groups="drop")
# B. Join ke Peta Dasar Sumatera
map_beta <- map_sumatera_base %>%
left_join(dat_beta, by = c("PROV_MATCH" = "PROV"))
# C. Plotting
# Catatan Penting: Nilai yang di-plot adalah Koefisien Beta asli dalam skala Log.
# Karena kita menggunakan model untuk data Count (Log-Link), interpretasinya:
# Nilai positif = meningkatkan risiko banjir; Negatif = menurunkan risiko.
p_beta_year <- ggplot(map_beta) +
# Layer Peta Koroplet
geom_sf(aes(fill = beta), color = "white", linewidth = 0.3) +
# Layer Label Provinsi
geom_text(data = label_df, aes(x = lon, y = lat, label = label),
inherit.aes = FALSE, color = "black", size = 2.5,
fontface = "bold", check_overlap = TRUE) +
# Skala Warna (Viridis: Default yang bagus untuk peta tematik)
scale_fill_viridis_c(
option = "viridis",
name = "Nilai Beta\n(Log Scale)", # Label diperjelas
na.value = "grey90" # Warna abu-abu jika data provinsi tersebut kosong
) +
# Judul dan Subtitle
labs(
title = paste0("Sebaran Koefisien Lokal GTWR: Variabel ", vb),
subtitle = paste0("Tahun: ", yy, " • Konteks: Model Data Count (Log-Link)"),
x = NULL, y = NULL
) +
# Tema Minimalis untuk Peta
theme_minimal(base_size = 11) +
theme(
plot.title = element_text(face = "bold"),
panel.grid = element_blank(), # Hapus garis grid latar belakang
axis.text = element_blank(), # Hapus angka koordinat lat/lon di pinggir
legend.position = "right"
)
# D. Simpan Gambar
if (SAVE_VIS) {
# Format nama file: Peta_Beta_t_X1_2015.png, dst.
filename <- sprintf("Peta_Beta_%s_%d.png", safe_var_name, yy)
# Simpan dengan resolusi yang cukup baik (dpi=200)
# Menggunakan tryCatch agar jika satu gambar gagal, loop tidak berhenti total
tryCatch({
ggsave(filename, p_beta_year, width = 8, height = 6, dpi = 200)
cat(sprintf(" [Saved] %s\n", filename))
}, error = function(e) {
cat(sprintf(" [ERROR] Gagal menyimpan %s: %s\n", filename, e$message))
})
} else {
# Jika SAVE_VIS = FALSE, hanya print status
cat(sprintf(" [Done] Tahun %d (Tidak disimpan)\n", yy))
}
# Paksa R untuk membersihkan memori setelah setiap plot agar tidak berat
gc(verbose = FALSE)
} # End Loop Tahun
cat("\n") # Spasi antar variabel di console
} # End Loop Variabel
## >>> Memproses Peta untuk Variabel: t_X1 <<<
## [Done] Tahun 2018 (Tidak disimpan)
## [Done] Tahun 2019 (Tidak disimpan)
## [Done] Tahun 2020 (Tidak disimpan)
## [Done] Tahun 2021 (Tidak disimpan)
## [Done] Tahun 2022 (Tidak disimpan)
## [Done] Tahun 2023 (Tidak disimpan)
## [Done] Tahun 2024 (Tidak disimpan)
##
## >>> Memproses Peta untuk Variabel: t_X2 <<<
## [Done] Tahun 2018 (Tidak disimpan)
## [Done] Tahun 2019 (Tidak disimpan)
## [Done] Tahun 2020 (Tidak disimpan)
## [Done] Tahun 2021 (Tidak disimpan)
## [Done] Tahun 2022 (Tidak disimpan)
## [Done] Tahun 2023 (Tidak disimpan)
## [Done] Tahun 2024 (Tidak disimpan)
##
## >>> Memproses Peta untuk Variabel: t_X3 <<<
## [Done] Tahun 2018 (Tidak disimpan)
## [Done] Tahun 2019 (Tidak disimpan)
## [Done] Tahun 2020 (Tidak disimpan)
## [Done] Tahun 2021 (Tidak disimpan)
## [Done] Tahun 2022 (Tidak disimpan)
## [Done] Tahun 2023 (Tidak disimpan)
## [Done] Tahun 2024 (Tidak disimpan)
##
## >>> Memproses Peta untuk Variabel: t_X4 <<<
## [Done] Tahun 2018 (Tidak disimpan)
## [Done] Tahun 2019 (Tidak disimpan)
## [Done] Tahun 2020 (Tidak disimpan)
## [Done] Tahun 2021 (Tidak disimpan)
## [Done] Tahun 2022 (Tidak disimpan)
## [Done] Tahun 2023 (Tidak disimpan)
## [Done] Tahun 2024 (Tidak disimpan)
##
## >>> Memproses Peta untuk Variabel: t_X5 <<<
## [Done] Tahun 2018 (Tidak disimpan)
## [Done] Tahun 2019 (Tidak disimpan)
## [Done] Tahun 2020 (Tidak disimpan)
## [Done] Tahun 2021 (Tidak disimpan)
## [Done] Tahun 2022 (Tidak disimpan)
## [Done] Tahun 2023 (Tidak disimpan)
## [Done] Tahun 2024 (Tidak disimpan)
##
## >>> Memproses Peta untuk Variabel: t_X6 <<<
## [Done] Tahun 2018 (Tidak disimpan)
## [Done] Tahun 2019 (Tidak disimpan)
## [Done] Tahun 2020 (Tidak disimpan)
## [Done] Tahun 2021 (Tidak disimpan)
## [Done] Tahun 2022 (Tidak disimpan)
## [Done] Tahun 2023 (Tidak disimpan)
## [Done] Tahun 2024 (Tidak disimpan)
##
## >>> Memproses Peta untuk Variabel: t_X7 <<<
## [Done] Tahun 2018 (Tidak disimpan)
## [Done] Tahun 2019 (Tidak disimpan)
## [Done] Tahun 2020 (Tidak disimpan)
## [Done] Tahun 2021 (Tidak disimpan)
## [Done] Tahun 2022 (Tidak disimpan)
## [Done] Tahun 2023 (Tidak disimpan)
## [Done] Tahun 2024 (Tidak disimpan)
cat("--- SEMUA PETA EVOLUSI BETA SELESAI DIBUAT ---\n")
## --- SEMUA PETA EVOLUSI BETA SELESAI DIBUAT ---
# ==============================================================================
# VISUALISASI EVOLUSI KOEFISIEN BETA (2018-2024)
# Mode: Slideshow / Animasi Langsung di RStudio (Tanpa Simpan File)
# ==============================================================================
# 1. KONFIGURASI ANIMASI
PAUSE_SEC <- 2.0 # Jeda 2 detik per tahun (agar mata sempat mengamati perubahan)
# 2. PERSIAPAN DATA (Fokus pada Data Historis GTWR 2018-2024)
# Ambil tahun yang ada di hasil pemodelan GTWR (seharusnya 2018-2024)
years_seq <- sort(unique(df_gtwr$Tahun))
# Deteksi variabel X secara otomatis (t_X1 s/d t_X7)
coef_vars <- names(df_gtwr)[grepl("t_X", names(df_gtwr))]
# Bersihkan dari kolom error/t-value jika ada
coef_vars <- coef_vars[!grepl("_SE$|_TV$", coef_vars)]
cat("\n--- MULAI SLIDESHOW EVOLUSI BETA (2018-2024) ---\n")
##
## --- MULAI SLIDESHOW EVOLUSI BETA (2018-2024) ---
cat("Rentang Data: ", min(years_seq), " sampai ", max(years_seq), "\n")
## Rentang Data: 2018 sampai 2024
cat("Perhatikan tab 'Plots' di RStudio Anda...\n\n")
## Perhatikan tab 'Plots' di RStudio Anda...
# 3. LOOPING VISUALISASI
for (vb in coef_vars) {
cat(paste0(">>> Menampilkan Peta Variabel: ", vb, " <<<\n"))
for (yy in years_seq) {
# A. Filter Data per Tahun (2018...2024)
dat_beta <- df_gtwr %>%
filter(Tahun == yy) %>%
mutate(PROV = toupper(Provinsi)) %>%
group_by(PROV) %>%
summarise(beta = mean(.data[[vb]], na.rm=TRUE), .groups="drop")
# B. Gabungkan dengan Peta Dasar
map_beta <- map_sumatera_base %>%
left_join(dat_beta, by = c("PROV_MATCH" = "PROV"))
# C. Buat Plot
p_beta_year <- ggplot(map_beta) +
geom_sf(aes(fill = beta), color = "white", linewidth = 0.3) +
# Label Nama Provinsi
geom_text(data = label_df, aes(x = lon, y = lat, label = label),
inherit.aes = FALSE, color = "black", size = 3,
fontface = "bold", check_overlap = TRUE) +
# Skala Warna (Beta Log-Linear)
scale_fill_viridis_c(
option = "viridis",
name = "Beta (Log)",
na.value = "grey90"
) +
# Judul Dinamis
labs(
title = paste0("Dinamika Pengaruh Variabel: ", vb),
subtitle = paste0("Tahun: ", yy, " (Data Panel Historis)"),
caption = "Nilai Beta menunjukkan sensitivitas wilayah terhadap variabel tersebut",
x = NULL, y = NULL
) +
theme_minimal(base_size = 12) +
theme(
plot.title = element_text(face = "bold", size = 14),
panel.grid = element_blank(),
axis.text = element_blank(),
legend.position = "right"
)
# D. TAMPILKAN LANGSUNG (RENDER)
print(p_beta_year)
# E. Tahan sebentar (Pause)
Sys.sleep(PAUSE_SEC)
} # End Loop Tahun
cat(paste0(" [Selesai] Variabel ", vb, ". Pindah ke variabel berikutnya...\n"))
Sys.sleep(1) # Jeda sedikit sebelum ganti variabel
} # End Loop Variabel
## >>> Menampilkan Peta Variabel: t_X1 <<<
## [Selesai] Variabel t_X1. Pindah ke variabel berikutnya...
## >>> Menampilkan Peta Variabel: t_X2 <<<
## [Selesai] Variabel t_X2. Pindah ke variabel berikutnya...
## >>> Menampilkan Peta Variabel: t_X3 <<<
## [Selesai] Variabel t_X3. Pindah ke variabel berikutnya...
## >>> Menampilkan Peta Variabel: t_X4 <<<
## [Selesai] Variabel t_X4. Pindah ke variabel berikutnya...
## >>> Menampilkan Peta Variabel: t_X5 <<<
## [Selesai] Variabel t_X5. Pindah ke variabel berikutnya...
## >>> Menampilkan Peta Variabel: t_X6 <<<
## [Selesai] Variabel t_X6. Pindah ke variabel berikutnya...
## >>> Menampilkan Peta Variabel: t_X7 <<<
## [Selesai] Variabel t_X7. Pindah ke variabel berikutnya...
cat("\n--- Slideshow Selesai. ---\n")
##
## --- Slideshow Selesai. ---