# Read the raw data, strip the "." thousands separators, and convert to numeric
Data_Tubes <- read_excel("G:/My Drive/Semester 5/Analisis Runtun Waktu/Analisis Runtun Waktu/Tubes ARW/Data Tubes.xlsx")
Data_Tubes$Data <- gsub("\\.", "", Data_Tubes$Data)
Data_Tubes$Data <- as.numeric(Data_Tubes$Data)
print(Data_Tubes)
## # A tibble: 60 × 3
##    Tahun Bulan      Data
##    <dbl> <dbl>     <dbl>
##  1  2020     1 120255000
##  2  2020     2 146131000
##  3  2020     3 164074000
##  4  2020     4  64987000
##  5  2020     5   6849000
##  6  2020     6  66456000
##  7  2020     7 152926000
##  8  2020     8 131194000
##  9  2020     9  60919000
## 10  2020    10  99546000
## # ℹ 50 more rows
sum(is.na(Data_Tubes$Data))   # count missing values
## [1] 0
cat("=== Statistik Deskriptif ===\n")
## === Statistik Deskriptif ===
summary(Data_Tubes$Data)  
##      Min.   1st Qu.    Median      Mean   3rd Qu.      Max. 
##   2130000  33859606  54850833  66046398  93062500 164074000
ts_data <- ts(Data_Tubes$Data, start = c(2020, 1), frequency = 12)

training <- Data_Tubes[1:48, ]    # 2020–2023 (48 months)
testing  <- Data_Tubes[49:60, ]   # 2024 (12 months)

ts_training <- ts(training$Data, start = c(2020, 1), frequency = 12)
ts_testing  <- ts(testing$Data, start = c(2024, 1), frequency = 12)

df_training <- data.frame(
  Tahun = as.Date(zoo::as.yearmon(time(ts_training))), 
  Nilai = as.numeric(ts_training),
  Tipe = "Training"
)

df_testing <- data.frame(
  Tahun = as.Date(zoo::as.yearmon(time(ts_testing))), 
  Nilai = as.numeric(ts_testing),
  Tipe = "Testing"
)
df_plot <- bind_rows(df_training, df_testing)
ggplot(df_plot, aes(x = Tahun, y = Nilai, color = Tipe, linetype = Tipe)) +
  geom_line(linewidth = 0.5) +
  scale_color_manual(values = c("Training" = "blue", "Testing" = "blue")) +
  scale_linetype_manual(values = c("Training" = "solid", "Testing" = "dashed")) +
  geom_vline(xintercept = as.Date("2024-01-01"), linetype = "dotted", color = "gray40") +
  labs(title = "Time Series Plot (Training vs Testing)",
       x = "Year", y = "Value") +
  theme_minimal(base_size = 12) +
  theme(legend.title = element_blank())
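# A quick structural look before the boxplots (a sketch, not part of the original
# output; the object name decomp_training is illustrative): a classical additive
# decomposition of the training series to inspect its trend, seasonal, and
# remainder components.
decomp_training <- decompose(ts_training)
plot(decomp_training)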

# Yearly boxplot (entire training period)
training$Periode <- "2020–2023"

ggplot(training, aes(x = Periode, y = Data)) +
  geom_boxplot(fill = "lightgreen", color = "darkgreen",
               outlier.colour = "red", outlier.size = 2) +
  labs(
    title = "Boxplot Data Training (2020–2023)",
    x = "",
    y = "Nilai Denda Pelanggaran"
  ) +
  theme_minimal(base_size = 12) +
  theme(
    plot.title = element_text(face = "bold", hjust = 0.5, size = 14),
    axis.title = element_text(face = "bold"),
    axis.text.x = element_text(face = "bold")
  )

# Boxplot per year
training$Tahun <- rep(2020:2023, each = 12)

ggplot(training, aes(x = factor(Tahun), y = Data)) +
  geom_boxplot(fill = "skyblue", color = "darkblue", outlier.colour = "red", outlier.size = 2) +
  labs(title = "Boxplot Data Training per Tahun (2020–2023)",
       x = "Tahun",
       y = "Nilai Denda Pelanggaran") +
  theme_minimal(base_size = 12) +
  theme(
    plot.title = element_text(face = "bold", hjust = 0.5, size = 14),
    axis.title = element_text(face = "bold")
  )

# Boxplot per month
training$Index <- 1:nrow(training)
training$Tahun <- 2020 + (training$Index - 1) %/% 12
training$BulanNum <- ((training$Index - 1) %% 12) + 1
training$Bulan <- month.abb[training$BulanNum]
training$Bulan <- factor(training$Bulan, levels = month.abb)
ggplot(training, aes(x = Bulan, y = Data)) +
  geom_boxplot(fill = "gold", color = "darkorange",
               outlier.colour = "red", outlier.size = 2) +
  labs(title = "Boxplot Data Training per Bulan (2020–2023)",
       x = "Bulan",
       y = "Nilai Denda Pelanggaran") +
  theme_minimal(base_size = 12) +
  theme(
    plot.title = element_text(face = "bold", hjust = 0.5, size = 14),
    axis.title = element_text(face = "bold"),
    axis.text.x = element_text(angle = 45, hjust = 1)
  )
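# A complementary view of the monthly pattern (a sketch; monthplot() comes from
# base R's stats package and is not part of the original script): one sub-series
# per calendar month, each drawn with its mean level.
monthplot(ts_training, ylab = "Violation Fine Amount", xlab = "Month")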

# Holt's linear (double) exponential smoothing; note that holt() is non-seasonal,
# so the seasonal = "none" argument is simply passed through and has no effect.
model_des_fit <- holt(ts_training, h = length(ts_testing),
                      initial = "optimal", seasonal = "none")

nilai_fitted_des <- ts(fitted(model_des_fit),
                       start = start(ts_training),
                       frequency = frequency(ts_training))

print("--- Ringkasan Model Double Exponential Smoothing (Holt's Method) ---")
## [1] "--- Ringkasan Model Double Exponential Smoothing (Holt's Method) ---"
summary(model_des_fit)
## 
## Forecast method: Holt's method
## 
## Model Information:
## Holt's method 
## 
## Call:
## holt(y = ts_training, h = length(ts_testing), initial = "optimal", 
##     seasonal = "none")
## 
##   Smoothing parameters:
##     alpha = 0.3681 
##     beta  = 1e-04 
## 
##   Initial states:
##     l = 122128399.7153 
##     b = -3780854.6827 
## 
##   sigma:  49224990
## 
##      AIC     AICc      BIC 
## 1891.985 1893.413 1901.341 
## 
## Error measures:
##                   ME     RMSE      MAE       MPE     MAPE      MASE       ACF1
## Training set 5590486 47129340 35856275 -111.0776 154.1508 0.5740707 0.03568187
## 
## Forecasts:
##          Point Forecast      Lo 80    Hi 80      Lo 95     Hi 95
## Jan 2024       36038588  -27045775 99122951  -60440620 132517796
## Feb 2024       32284569  -34939549 99508686  -70525847 135094984
## Mar 2024       28530550  -42594833 99655933  -80246337 137307436
## Apr 2024       24776530  -50048943 99602003  -89659155 139212216
## May 2024       21022511  -57330383 99375405  -98807899 140852922
## Jun 2024       17268492  -64461514 98998498 -107726766 142263750
## Jul 2024       13514473  -71460260 98489205 -116443166 143472111
## Aug 2024        9760453  -78341249 97862156 -124979473 144500380
## Sep 2024        6006434  -85116605 97129474 -133354229 145367097
## Oct 2024        2252415  -91796510 96301340 -141583004 146087834
## Nov 2024       -1501604  -98389612 95386404 -149679025 146675817
## Dec 2024       -5255623 -104903331 94392084 -157653641 147142394
# --- Plot the DES results ---
autoplot(ts_training, series = "Actual Data (2020-2023)") +
  autolayer(nilai_fitted_des, series = "DES Fitted Line (Smoothing)") +
  labs(
    title = "Double Exponential Smoothing (DES) (2020-2023)",
    subtitle = paste(
      "Optimal Alpha:", round(model_des_fit$model$par["alpha"], 4),
      "| Optimal Beta:", round(model_des_fit$model$par["beta"], 4)
    ),
    y = "Fine Amount (Rp)",
    x = "Time"
  ) +
  guides(colour = guide_legend(title = "Series")) +
  theme(legend.position = "bottom")
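# An extra check (a sketch, not part of the original output): compare the Holt
# forecasts against the 2024 hold-out with forecast::accuracy(), which reports
# RMSE, MAE, and MAPE for both the training and test sets.
accuracy(model_des_fit, ts_testing)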

cat("=== UJI STASIONERITAS ADF TEST ===\n")
## === UJI STASIONERITAS ADF TEST ===
adf_test_result <- adf.test(ts_training)
print(adf_test_result) 
## 
##  Augmented Dickey-Fuller Test
## 
## data:  ts_training
## Dickey-Fuller = -2.7682, Lag order = 3, p-value = 0.2673
## alternative hypothesis: stationary
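# A complementary check (a sketch; assumes the tseries package already loaded for
# adf.test): the KPSS test's null hypothesis is stationarity, so a small p-value
# here would also point toward differencing.
kpss.test(ts_training)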
# Deterministic linear-trend regression on a time index
t <- 1:length(ts_training)
lm_trend <- lm(ts_training ~ t)
summary(lm_trend)
## 
## Call:
## lm(formula = ts_training ~ t)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -72820639 -39235110 -12320695  41268270  90725432 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept) 83708653   14122442   5.927 3.72e-07 ***
## t            -807803     501766  -1.610    0.114    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 48160000 on 46 degrees of freedom
## Multiple R-squared:  0.05334,    Adjusted R-squared:  0.03276 
## F-statistic: 2.592 on 1 and 46 DF,  p-value: 0.1143
plot(ts_training, main="Regresi Tren", ylab="Denda")
lines(ts_training, col="black")
lines(lm_trend$fitted.values, col="red", lwd=2)

# Extend the time index 12 steps ahead and predict from the trend regression
t_future <- (length(ts_training) + 1):(length(ts_training) + 12)
reg_pred <- predict(lm_trend, newdata = list(t = t_future))
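# A quick, hypothetical comparison (not in the original script): RMSE of the
# linear-trend prediction against the 2024 testing set defined earlier.
sqrt(mean((testing$Data - reg_pred)^2))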
# ====== ACF ======
acf(ts_data, main = "ACF Original Data")

# ACF values
acf_values <- acf(ts_data, plot = FALSE)

cat("\n=== Nilai ACF ===\n")
## 
## === Nilai ACF ===
print(acf_values)
## 
## Autocorrelations of series 'ts_data', by lag
## 
## 0.0000 0.0833 0.1667 0.2500 0.3333 0.4167 0.5000 0.5833 0.6667 0.7500 0.8333 
##  1.000  0.354  0.143  0.085  0.035  0.114  0.014  0.058 -0.208 -0.160 -0.344 
## 0.9167 1.0000 1.0833 1.1667 1.2500 1.3333 1.4167 
## -0.356 -0.199 -0.291 -0.167 -0.223 -0.153 -0.036
# ====== PACF ======
pacf(ts_data, main = "PACF Original Data")

# PACF values
pacf_values <- pacf(ts_data, plot = FALSE)

cat("\n=== Nilai PACF ===\n")
## 
## === Nilai PACF ===
print(pacf_values)
## 
## Partial autocorrelations of series 'ts_data', by lag
## 
## 0.0833 0.1667 0.2500 0.3333 0.4167 0.5000 0.5833 0.6667 0.7500 0.8333 0.9167 
##  0.354  0.020  0.032 -0.008  0.114 -0.073  0.072 -0.299  0.015 -0.368 -0.102 
## 1.0000 1.0833 1.1667 1.2500 1.3333 1.4167 
## -0.104 -0.154 -0.069 -0.087 -0.102  0.098
# ====== ADF TEST ======
adf_test <- adf.test(ts_data)
cat("\n=== Uji ADF (Mean Stasioner?) ===\n")
## 
## === Uji ADF (Mean Stasioner?) ===
print(adf_test)
## 
##  Augmented Dickey-Fuller Test
## 
## data:  ts_data
## Dickey-Fuller = -3.2627, Lag order = 3, p-value = 0.08607
## alternative hypothesis: stationary
# ====== VARIANCE STATIONARITY CHECK (MANUAL) ======
cat("\n=== Variance Stationarity Check (Manual) ===\n")
## 
## === Variance Stationarity Check (Manual) ===
n <- length(ts_data)
half <- floor(n/2)

bagian_awal  <- ts_data[1:half]        # first half of the series
bagian_akhir <- ts_data[(half+1):n]    # second half of the series

var_awal  <- var(bagian_awal)
var_akhir <- var(bagian_akhir)

mean_awal  <- mean(bagian_awal)
mean_akhir <- mean(bagian_akhir)

cat("Mean Bagian Awal  :", round(mean_awal, 3), "\n")
## Mean Bagian Awal  : 74138807
cat("Mean Bagian Akhir :", round(mean_akhir, 3), "\n\n")
## Mean Bagian Akhir : 57953989
cat("Varians Bagian Awal  :", round(var_awal, 3), "\n")
## Varians Bagian Awal  : 2.866654e+15
cat("Varians Bagian Akhir :", round(var_akhir, 3), "\n\n")
## Varians Bagian Akhir : 1.2262e+15
if (abs(var_awal - var_akhir) / var_awal < 0.20) {
  cat("Conclusion: variance is relatively stable (variance-stationary)\n")
} else {
  cat("Conclusion: variance changes substantially → not variance-stationary\n")
}
## Conclusion: variance changes substantially → not variance-stationary
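# A formal complement to the 20% rule of thumb above (a sketch, not part of the
# original output; assumes the two halves are roughly normal): an F test for the
# equality of the two variances.
var.test(bagian_awal, bagian_akhir)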
cat("\n=== LANGKAH: Box–Cox Transformation Check ===\n")
## 
## === LANGKAH: Box–Cox Transformation Check ===
# Cari lambda optimal
lambda_opt <- BoxCox.lambda(ts_data)
cat("Lambda optimal =", round(lambda_opt, 3), "\n\n")
## Lambda optimal = 0.598
# Box–Cox log-likelihood profile plot (boxcox() does not take a main argument)
boxcox(ts_data ~ 1,
       lambda = seq(-2, 2, 0.1))
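# A sketch of applying the full Box–Cox transform with the estimated lambda (the
# object name ts_boxcox is illustrative). This is an alternative to the square
# root used below; since lambda ≈ 0.6, the square root (lambda = 0.5) is a close
# and simpler approximation.
ts_boxcox <- BoxCox(ts_data, lambda = lambda_opt)
plot(ts_boxcox, main = "Box–Cox Transformed Data", ylab = "Transformed Value")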

cat("=== LANGKAH 1: Box-Cox Sebelum Transformasi ===\n")
## === LANGKAH 1: Box-Cox Sebelum Transformasi ===
ts_sqrt <- sqrt(ts_data)

# Plot the original series and the transformed series
plot(ts_data, type = "l", col = "darkred", lwd = 2,
     main = "Original Data (Before Transformation)",
     ylab = "Value", xlab = "Time")

plot(ts_sqrt, type = "l", col = "darkblue", lwd = 2,
     main = "Data After Square-Root Transformation",
     ylab = "Value", xlab = "Time")

# --- STEP 2: ACF & PACF After Transformation ---
par(mfrow=c(1,2))
acf(ts_sqrt, main = "ACF - After Square-Root Transformation")
pacf(ts_sqrt, main = "PACF - After Square-Root Transformation")

par(mfrow=c(1,1))

# Store the numeric ACF & PACF values
acf_vals  <- acf(ts_sqrt, plot = FALSE)
pacf_vals <- pacf(ts_sqrt, plot = FALSE)

cat("\n=== Nilai Numerik ACF ===\n")
## 
## === Nilai Numerik ACF ===
print(acf_vals)
## 
## Autocorrelations of series 'ts_sqrt', by lag
## 
## 0.0000 0.0833 0.1667 0.2500 0.3333 0.4167 0.5000 0.5833 0.6667 0.7500 0.8333 
##  1.000  0.378  0.201  0.094  0.034  0.089 -0.036  0.058 -0.194 -0.183 -0.311 
## 0.9167 1.0000 1.0833 1.1667 1.2500 1.3333 1.4167 
## -0.373 -0.221 -0.321 -0.163 -0.228 -0.180  0.016
cat("\n=== Nilai Numerik PACF ===\n")
## 
## === Nilai Numerik PACF ===
print(pacf_vals)
## 
## Partial autocorrelations of series 'ts_sqrt', by lag
## 
## 0.0833 0.1667 0.2500 0.3333 0.4167 0.5000 0.5833 0.6667 0.7500 0.8333 0.9167 
##  0.378  0.068 -0.003 -0.015  0.089 -0.111  0.102 -0.281 -0.036 -0.259 -0.172 
## 1.0000 1.0833 1.1667 1.2500 1.3333 1.4167 
## -0.046 -0.196 -0.030 -0.133 -0.123  0.138
# --- STEP 3: ADF Test After Transformation ---
adf_sqrt <- adf.test(ts_sqrt)
print(adf_sqrt)
## 
##  Augmented Dickey-Fuller Test
## 
## data:  ts_sqrt
## Dickey-Fuller = -3.2305, Lag order = 3, p-value = 0.09117
## alternative hypothesis: stationary
cat("\nInterpretasi:\n")
## 
## Interpretasi:
if (adf_sqrt$p.value < 0.05) {
  cat("→ p-value < 0.05 → Data is ALREADY mean-stationary after the square-root transformation.\n")
} else {
  cat("→ p-value ≥ 0.05 → Data is NOT yet mean-stationary → differencing is needed.\n")
}
## → p-value ≥ 0.05 → Data is NOT yet mean-stationary → differencing is needed.
ts_sqrt_diff <- diff(ts_sqrt)   # first (non-seasonal) difference

cat("\n=== Data Setelah Differencing (Transformasi Akar + Diff 1) ===\n")
## 
## === Data Setelah Differencing (Transformasi Akar + Diff 1) ===
print(head(ts_sqrt_diff))
##             Feb        Mar        Apr        May        Jun        Jul
## 2020  1122.3815   720.6718 -4747.6859 -5444.3921  5534.9955  4214.2703
cat("\n=== ACF & PACF Setelah Differencing ===\n")
## 
## === ACF & PACF Setelah Differencing ===
par(mfrow=c(1,2))
acf(ts_sqrt_diff, main = "ACF - After Differencing")
pacf(ts_sqrt_diff, main = "PACF - After Differencing")

par(mfrow=c(1,1))

acf_diff_vals  <- acf(ts_sqrt_diff, plot = FALSE)
pacf_diff_vals <- pacf(ts_sqrt_diff, plot = FALSE)

cat("\n=== Nilai Numerik ACF Setelah Differencing ===\n")
## 
## === Nilai Numerik ACF Setelah Differencing ===
print(acf_diff_vals)
## 
## Autocorrelations of series 'ts_sqrt_diff', by lag
## 
## 0.0000 0.0833 0.1667 0.2500 0.3333 0.4167 0.5000 0.5833 0.6667 0.7500 0.8333 
##  1.000 -0.371 -0.062 -0.012 -0.066  0.118 -0.203  0.288 -0.195  0.101 -0.028 
## 0.9167 1.0000 1.0833 1.1667 1.2500 1.3333 1.4167 
## -0.176  0.217 -0.218  0.189 -0.101 -0.110  0.164
cat("\n=== Nilai Numerik PACF Setelah Differencing ===\n")
## 
## === Nilai Numerik PACF Setelah Differencing ===
print(pacf_diff_vals)
## 
## Partial autocorrelations of series 'ts_sqrt_diff', by lag
## 
## 0.0833 0.1667 0.2500 0.3333 0.4167 0.5000 0.5833 0.6667 0.7500 0.8333 0.9167 
## -0.371 -0.231 -0.154 -0.185 -0.005 -0.235  0.153 -0.098  0.086 -0.014 -0.163 
## 1.0000 1.0833 1.1667 1.2500 1.3333 1.4167 
##  0.021 -0.163 -0.017 -0.065 -0.256 -0.032
cat("\n=== Uji ADF Setelah Differencing ===\n")
## 
## === Uji ADF Setelah Differencing ===
adf_diff <- adf.test(ts_sqrt_diff)
## Warning in adf.test(ts_sqrt_diff): p-value smaller than printed p-value
print(adf_diff)
## 
##  Augmented Dickey-Fuller Test
## 
## data:  ts_sqrt_diff
## Dickey-Fuller = -5.9242, Lag order = 3, p-value = 0.01
## alternative hypothesis: stationary
cat("\nInterpretasi ADF:\n")
## 
## Interpretasi ADF:
if (adf_diff$p.value < 0.05) {
  cat("→ p-value < 0.05 → Data is mean-stationary after differencing.\n")
} else {
  cat("→ Data is STILL not stationary; additional differencing may be needed.\n")
}
## → p-value < 0.05 → Data is mean-stationary after differencing.
cat("\n=== Uji ARCH / Heteroskedastisitas ===\n")
## 
## === Uji ARCH / Heteroskedastisitas ===
library(FinTS)
arch_test <- ArchTest(ts_sqrt_diff)
print(arch_test)
## 
##  ARCH LM-test; Null hypothesis: no ARCH effects
## 
## data:  ts_sqrt_diff
## Chi-squared = 4.651, df = 12, p-value = 0.9686
cat("\nInterpretasi ARCH:\n")
## 
## Interpretasi ARCH:
if (arch_test$p.value > 0.05) {
  cat("→ p-value > 0.05 → NO heteroskedasticity → the variance is already stationary.\n")
} else {
  cat("→ p-value ≤ 0.05 → Heteroskedasticity present → the variance is NOT yet stationary.\n")
}
## → p-value > 0.05 → NO heteroskedasticity → the variance is already stationary.
ts_sqrt <- ts(ts_sqrt, start = c(2020, 1), frequency = 12)   # re-assert the ts attributes

# ------------------------------
# 2. Manual ARIMA (without auto.arima)
# ------------------------------
cat("=== MANUAL ARIMA WITHOUT auto.arima ===\n")
## === MANUAL ARIMA WITHOUT auto.arima ===
m_arima_011 <- arima(ts_sqrt, order=c(0,1,1))
m_arima_111 <- arima(ts_sqrt, order=c(1,1,1))
m_arima_012 <- arima(ts_sqrt, order=c(0,1,2))
m_arima_211 <- arima(ts_sqrt, order=c(2,1,1))
m_arima_110 <- arima(ts_sqrt, order=c(1,1,0))

# ------------------------------
# 3. Compare AIC values
# ------------------------------
cat("\n=== AIC FOR EACH MODEL ===\n")
## 
## === AIC FOR EACH MODEL ===
AIC(m_arima_011, m_arima_111, m_arima_012, m_arima_211, m_arima_110)
##             df      AIC
## m_arima_011  2 1111.151
## m_arima_111  3 1109.548
## m_arima_012  3 1112.100
## m_arima_211  4 1110.993
## m_arima_110  2 1116.817
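# One way to weigh parsimony and coefficient significance alongside AIC (a sketch,
# not part of the original output; the object name z_111 is illustrative):
# approximate z statistics for the ARIMA(1,1,1) coefficients, computed as
# estimate / standard error, with standard errors taken from var.coef.
z_111 <- m_arima_111$coef / sqrt(diag(m_arima_111$var.coef))
round(cbind(coef = m_arima_111$coef,
            z    = z_111,
            p    = 2 * pnorm(-abs(z_111))), 4)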
# ------------------------------
# 4. Select the model
#    (ARIMA(1,1,0) is used below, although ARIMA(1,1,1) has the lowest AIC above)
# ------------------------------
model_best <- m_arima_110
cat("\n=== SELECTED MODEL ===\n")
## 
## === SELECTED MODEL ===
print(model_best)
## 
## Call:
## arima(x = ts_sqrt, order = c(1, 1, 0))
## 
## Coefficients:
##           ar1
##       -0.3655
## s.e.   0.1198
## 
## sigma^2 estimated as 9074748:  log likelihood = -556.41,  aic = 1116.82
# ------------------------------
# 5. Residual Diagnostics
# ------------------------------
par(mfrow=c(2,2))
plot(model_best$residuals, main="Residual Plot")
acf(model_best$residuals, main="ACF Residual")
pacf(model_best$residuals, main="PACF Residual")
hist(model_best$residuals, main="Histogram Residual", xlab="Residual")

par(mfrow=c(1,1))

# ------------------------------
# 6. Ljung-Box Test (residual independence)
# ------------------------------
cat("\n=== LJUNG-BOX TEST ===\n")
## 
## === LJUNG-BOX TEST ===
print(Box.test(model_best$residuals, lag=12, type="Ljung"))
## 
##  Box-Ljung test
## 
## data:  model_best$residuals
## X-squared = 12.462, df = 12, p-value = 0.4093
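# A one-call alternative for the residual diagnostics above (a sketch, assuming
# the forecast package): checkresiduals() plots the residuals, their ACF, and a
# histogram, and runs a Ljung-Box test in a single step.
checkresiduals(model_best, lag = 12)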
# ------------------------------
# 7. ARCH Test (heteroskedasticity)
# ------------------------------
cat("\n=== ARCH TEST (HETEROSKEDASTICITY) ===\n")
## 
## === ARCH TEST (HETEROSKEDASTICITY) ===
arch_test <- ArchTest(model_best$residuals, lags=12)
print(arch_test)
## 
##  ARCH LM-test; Null hypothesis: no ARCH effects
## 
## data:  model_best$residuals
## Chi-squared = 6.3834, df = 12, p-value = 0.8955
# ------------------------------
# 8. 12-month forecast
#    (the model is fitted on data through Dec 2024, so the horizon is Jan–Dec 2025)
# ------------------------------
cat("\n=== 12-MONTH FORECAST (JAN–DEC 2025) ===\n")
## 
## === 12-MONTH FORECAST (JAN–DEC 2025) ===
forecast_2024 <- forecast(model_best, h = 12)
print(forecast_2024)
##          Point Forecast      Lo 80    Hi 80      Lo 95    Hi 95
## Jan 2025       7686.923  3826.3354 11547.51  1782.6639 13591.18
## Feb 2025       7596.489  3024.2595 12168.72   603.8673 14589.11
## Mar 2025       7629.538  2179.9093 13079.17  -704.9501 15964.03
## Apr 2025       7617.460  1501.1194 13733.80 -1736.6760 16971.60
## May 2025       7621.874   875.9137 14367.84 -2695.1822 17938.93
## Jun 2025       7620.261   308.3706 14932.15 -3562.3105 18802.83
## Jul 2025       7620.851  -219.5236 15461.23 -4369.9671 19611.67
## Aug 2025       7620.635  -713.6424 15954.91 -5125.5425 20366.81
## Sep 2025       7620.714 -1180.1862 16421.61 -5839.1013 21080.53
## Oct 2025       7620.685 -1623.1757 16864.55 -6516.5801 21757.95
## Nov 2025       7620.696 -2045.8969 17287.29 -7163.0820 22404.47
## Dec 2025       7620.692 -2450.8881 17692.27 -7782.4606 23023.84
forecast_original_scale <- forecast_2024$mean^2
cat("\nForecast di skala asli (kuadrat dari transformasi akar):\n")
## 
## Forecast di skala asli (kuadrat dari transformasi akar):
print(forecast_original_scale)
##           Jan      Feb      Mar      Apr      May      Jun      Jul      Aug
## 2025 59088780 57706648 58209856 58025704 58092969 58068382 58077367 58074083
##           Sep      Oct      Nov      Dec
## 2025 58075283 58074845 58075005 58074946
plot(forecast_2024,
     main = "Manual ARIMA Forecast (Square-Root Scale)",
     xlab = "Year",
     ylab = "Value",
     xlim = c(2020, 2026))   # extend the x-axis so the full 2025 horizon is visible
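# The back-transform above squares only the point forecast, which understates the
# mean on the original scale because E[Y^2] = mu^2 + sigma^2. A rough, hypothetical
# bias adjustment (not part of the original analysis; sigma_h is an illustrative
# name) recovers sigma from the half-width of the 80% prediction interval.
sigma_h <- (forecast_2024$upper[, "80%"] - forecast_2024$mean) / qnorm(0.9)
forecast_bias_adj <- forecast_2024$mean^2 + sigma_h^2
print(forecast_bias_adj)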

# ------------------------------
# 9. Residual Normality Test (Kolmogorov–Smirnov)
# ------------------------------
cat("\n=== KOLMOGOROV–SMIRNOV TEST (Normality) ===\n")
## 
## === KOLMOGOROV–SMIRNOV TEST (Normality) ===
resid <- model_best$residuals
ks <- ks.test(resid, "pnorm", mean(resid), sd(resid))
print(ks)
## 
##  Exact one-sample Kolmogorov-Smirnov test
## 
## data:  resid
## D = 0.063033, p-value = 0.9588
## alternative hypothesis: two-sided
if (ks$p.value > 0.05) {
  cat("→ Residuals are NORMAL (p > 0.05)\n")
} else {
  cat("→ Residuals are NOT normal (p ≤ 0.05)\n")
}
## → Residuals are NORMAL (p > 0.05)
# Error metrics: RMSE, MAPE (%), and symmetric MAPE (%)
rmse  <- function(a, b) sqrt(mean((a - b)^2))
mape  <- function(a, b) mean(abs((a - b) / a)) * 100
smape <- function(a, b) mean(2 * abs(a - b) / (abs(a) + abs(b))) * 100

# In-sample "fitted" values reconstructed as observed minus residuals.
# Note: ts_sqrt[2:length(ts_sqrt)] is one element shorter than the residual vector,
# which triggers the recycling warning below and shifts the alignment by one step.
actual_sqrt <- ts_sqrt[2:length(ts_sqrt)]
pred_sqrt   <- ts_sqrt[2:length(ts_sqrt)] - model_best$residuals
## Warning in `-.default`(ts_sqrt[2:length(ts_sqrt)], model_best$residuals):
## longer object length is not a multiple of shorter object length
n <- min(length(actual_sqrt), length(pred_sqrt))
actual_sqrt <- actual_sqrt[1:n]
pred_sqrt   <- pred_sqrt[1:n]

eval_sqrt <- data.frame(
  Model = c("Model_Best (Square-Root Scale)"),
  RMSE  = c(rmse(actual_sqrt, pred_sqrt)),
  MAPE  = c(mape(actual_sqrt, pred_sqrt)),
  SMAPE = c(smape(actual_sqrt, pred_sqrt))
)

cat("\n=== Evaluation on the Square-Root Scale ===\n")
## 
## === Evaluation on the Square-Root Scale ===
print(eval_sqrt)
##                            Model     RMSE     MAPE    SMAPE
## 1 Model_Best (Square-Root Scale) 3011.388 37.60024 38.41022
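# A cleaner alternative (a sketch; its numbers may differ slightly from the table
# above and fitted_sqrt is an illustrative name): align fitted values with every
# observation directly via residuals(), which avoids the length-mismatch warning
# and the one-step shift.
fitted_sqrt <- as.numeric(ts_sqrt) - as.numeric(residuals(model_best))
data.frame(
  Model = "Model_Best (aligned fitted values)",
  RMSE  = rmse(as.numeric(ts_sqrt), fitted_sqrt),
  MAPE  = mape(as.numeric(ts_sqrt), fitted_sqrt),
  SMAPE = smape(as.numeric(ts_sqrt), fitted_sqrt)
)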
actual_orig <- actual_sqrt^2
pred_orig   <- pred_sqrt^2

eval_orig <- data.frame(
  Model = c("Model_Best (Original Scale)"),
  RMSE  = c(rmse(actual_orig, pred_orig)),
  MAPE  = c(mape(actual_orig, pred_orig)),
  SMAPE = c(smape(actual_orig, pred_orig))
)

cat("\n=== Evaluation on the Original Scale (Square of the Root-Scale Values) ===\n")
## 
## === Evaluation on the Original Scale (Square of the Root-Scale Values) ===
print(eval_orig)
##                         Model     RMSE     MAPE   SMAPE
## 1 Model_Best (Original Scale) 51658362 81.59334 62.9538
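# An out-of-sample check using the training/testing split defined earlier (a
# hypothetical extension, not part of the original output; the object names
# below are illustrative): refit the selected ARIMA(1,1,0) order on the square
# root of the training data only, forecast the 12-month test window,
# back-transform, and compute the hold-out errors.
ts_sqrt_train <- sqrt(ts_training)
m_holdout  <- arima(ts_sqrt_train, order = c(1, 1, 0))
fc_holdout <- forecast(m_holdout, h = length(ts_testing))
pred_test  <- as.numeric(fc_holdout$mean)^2   # back to the original scale
data.frame(
  RMSE  = rmse(as.numeric(ts_testing), pred_test),
  MAPE  = mape(as.numeric(ts_testing), pred_test),
  SMAPE = smape(as.numeric(ts_testing), pred_test)
)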