IMPORT LIBRARIES

#Load the required libraries
library(readxl)
library(ggplot2)
library(forecast)
## Registered S3 method overwritten by 'quantmod':
##   method            from
##   as.zoo.data.frame zoo
library(pastecs) #stat.desc
library(tseries) #adf.test
library(Metrics)
## 
## Attaching package: 'Metrics'
## The following object is masked from 'package:forecast':
## 
##     accuracy

LOAD DATA

Monthly visitor counts at Lokawisata Baturaden (January - December) for the years 2014-2023.

df <- read_excel("C:/Users/naura/Downloads/FOLDER NADIA/SEMESTER 8/Data Terbaru.xlsx")
df
## # A tibble: 120 × 2
##    Bulan     Jumlah_Pengunjung
##    <chr>                 <dbl>
##  1 Januari               47237
##  2 Februari              18009
##  3 Maret                 17462
##  4 April                 17974
##  5 Mei                   24137
##  6 Juni                  28620
##  7 Juli                  66517
##  8 Agustus               57905
##  9 September             15975
## 10 Oktober               21374
## # ℹ 110 more rows
View(df)

EXPLORATORY DATA ANALYSIS (EDA)

#Check the dimensions of the data
dim(df)
## [1] 120   2
#Check the variable names
names(df)
## [1] "Bulan"             "Jumlah_Pengunjung"
#Check the data type of each variable
str(df)
## tibble [120 × 2] (S3: tbl_df/tbl/data.frame)
##  $ Bulan            : chr [1:120] "Januari" "Februari" "Maret" "April" ...
##  $ Jumlah_Pengunjung: num [1:120] 47237 18009 17462 17974 24137 ...
#Descriptive statistics
stat.desc(df)
##          Bulan Jumlah_Pengunjung
## nbr.val     NA      1.200000e+02
## nbr.null    NA      3.000000e+00
## nbr.na      NA      0.000000e+00
## min         NA      0.000000e+00
## max         NA      1.929000e+05
## range       NA      1.929000e+05
## sum         NA      4.634130e+06
## median      NA      3.056200e+04
## mean        NA      3.861775e+04
## SE.mean     NA      2.947106e+03
## CI.mean     NA      5.835563e+03
## var         NA      1.042252e+09
## std.dev     NA      3.228392e+04
## coef.var    NA      8.359867e-01
summary(df)
##     Bulan           Jumlah_Pengunjung
##  Length:120         Min.   :     0   
##  Class :character   1st Qu.: 19870   
##  Mode  :character   Median : 30562   
##                     Mean   : 38618   
##                     3rd Qu.: 45631   
##                     Max.   :192900
#Compute the variance
var(df$Jumlah_Pengunjung)
## [1] 1042251770
#Compute the standard deviation
sqrt(var(df$Jumlah_Pengunjung))
## [1] 32283.92
#Convert the data to a time series
tsdata <- ts(df$Jumlah_Pengunjung, start = c(2014, 1), end = c(2023, 12), frequency = 12)
tsdata
##         Jan    Feb    Mar    Apr    May    Jun    Jul    Aug    Sep    Oct
## 2014  47237  18009  17462  17974  24137  28620  66517  57905  15975  21374
## 2015  50717  22139  22571  24334  39411  24253 118361  34141  24731  25585
## 2016  71349  27932  27068  24150  52206  13814 140578  26706  30904  30941
## 2017  91912  30414  33408  45319  41580 106245 100674  30121  40614  27611
## 2018  71755  34453  39112  48870  37354 176599  77002  37200  45403  33180
## 2019  64179  35357  41663  51756  16184 192900  83148  33242  39699  40015
## 2020  60961  30710  17779      0      0      0   2481  29679  14428  17690
## 2021  12787   5718  13184  10387  46598  24281    336    175   5416  16706
## 2022  39566  21653  22694   4233 115430  45257  41466  19804  19892  18482
## 2023  38087  23004  20936  53182  43658  37191  37456  13467  21674  20818
##         Nov    Dec
## 2014  22328  46315
## 2015  21433  53813
## 2016  26229  66107
## 2017  20591  64931
## 2018  36639  78096
## 2019  40052 108701
## 2020  15964  10926
## 2021  15982  27019
## 2022  15320  38087
## 2023  16524  47737
#Plot the time series
plot.ts(tsdata, xlab = "Tahun", ylab = "Jumlah Pengunjung")
#Add an axis for the years
axis(1, at = seq(2014, 2023, by = 1), labels = seq(2014, 2023, by = 1))

DATA PREPROCESSING

#Only the visitor-count variable is used for the time series
data <- df$Jumlah_Pengunjung
data
##   [1]  47237  18009  17462  17974  24137  28620  66517  57905  15975  21374
##  [11]  22328  46315  50717  22139  22571  24334  39411  24253 118361  34141
##  [21]  24731  25585  21433  53813  71349  27932  27068  24150  52206  13814
##  [31] 140578  26706  30904  30941  26229  66107  91912  30414  33408  45319
##  [41]  41580 106245 100674  30121  40614  27611  20591  64931  71755  34453
##  [51]  39112  48870  37354 176599  77002  37200  45403  33180  36639  78096
##  [61]  64179  35357  41663  51756  16184 192900  83148  33242  39699  40015
##  [71]  40052 108701  60961  30710  17779      0      0      0   2481  29679
##  [81]  14428  17690  15964  10926  12787   5718  13184  10387  46598  24281
##  [91]    336    175   5416  16706  15982  27019  39566  21653  22694   4233
## [101] 115430  45257  41466  19804  19892  18482  15320  38087  38087  23004
## [111]  20936  53182  43658  37191  37456  13467  21674  20818  16524  47737
#Check for missing values
is.na(data)
##   [1] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [13] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [25] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [37] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [49] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [61] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [73] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [85] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [97] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [109] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
#Check for outliers in the data
boxplot(data, ylab = "Jumlah Pengunjung")

#Identify outliers (example using the IQR method)
Q1 <- quantile(data, 0.25)
Q3 <- quantile(data, 0.75)

IQR <- IQR(data)

lower_bound <- Q1 - 1.5 * IQR
upper_bound <- Q3 + 1.5 * IQR

#Display the outliers
outliers <- data[data < lower_bound | data > upper_bound]
outliers
## [1] 118361 140578  91912 106245 100674 176599 192900 108701 115430
#Transform the data (square root)
transform <- sqrt(data)
transform
##   [1] 217.34075 134.19762 132.14386 134.06715 155.36087 169.17447 257.90890
##   [8] 240.63458 126.39225 146.19850 149.42557 215.20920 225.20435 148.79180
##  [15] 150.23648 155.99359 198.52204 155.73375 344.03634 184.77283 157.26093
##  [22] 159.95312 146.40014 231.97629 267.11234 167.12869 164.52355 155.40270
##  [29] 228.48632 117.53297 374.93733 163.41971 175.79534 175.90054 161.95370
##  [36] 257.11282 303.16992 174.39610 182.77855 212.88260 203.91175 325.95245
##  [43] 317.29166 173.55403 201.52915 166.16558 143.49564 254.81562 267.87124
##  [50] 185.61519 197.76754 221.06560 193.27183 420.23684 277.49234 192.87302
##  [57] 213.07980 182.15378 191.41317 279.45662 253.33575 188.03457 204.11516
##  [64] 227.49945 127.21635 439.20383 288.35395 182.32389 199.24608 200.03750
##  [71] 200.12996 329.69835 246.90281 175.24269 133.33792   0.00000   0.00000
##  [78]   0.00000  49.80964 172.27594 120.11661 133.00376 126.34872 104.52751
##  [85] 113.07962  75.61746 114.82160 101.91663 215.86570 155.82362  18.33030
##  [92]  13.22876  73.59348 129.25169 126.41994 164.37457 198.91204 147.14958
##  [99] 150.64528  65.06151 339.74991 212.73693 203.63202 140.72669 141.03900
## [106] 135.94852 123.77399 195.15891 195.15891 151.67070 144.69278 230.61223
## [113] 208.94497 192.84968 193.53553 116.04740 147.22092 144.28444 128.54571
## [120] 218.48799
#Save the combined result into a new data frame
result <- data.frame(transform = transform)

#Specify the location and file name for saving the result
file_name <- "Hasil_transformasidata_akarkuadrat.csv"

#Save the result to a CSV file
write.csv(result, file = file_name, row.names = FALSE)

#Display a confirmation message
print(paste("Hasil telah disimpan dalam file", file_name))
## [1] "Hasil telah disimpan dalam file Hasil_transformasidata_akarkuadrat.csv"
#Check for outliers after the data transformation
boxplot(transform, ylab = "Jumlah Pengunjung")

#Identify outliers after the transformation (example using the IQR method)
Q1_t <- quantile(transform, 0.25)
Q3_t <- quantile(transform, 0.75)

IQR_t <- IQR(transform)

lower_bound_t <- Q1_t - 1.5 * IQR_t
upper_bound_t <- Q3_t + 1.5 * IQR_t

#Display the outliers
outliers1 <- transform[transform < lower_bound_t | transform > upper_bound_t]
outliers1
##  [1] 344.03634 374.93733 325.95245 420.23684 439.20383 329.69835   0.00000
##  [8]   0.00000   0.00000  18.33030  13.22876 339.74991
#Convert the transformed data to a time series
datats <- ts(transform, start = c(2014, 1), end = c(2023, 12), frequency = 12)
datats
##            Jan       Feb       Mar       Apr       May       Jun       Jul
## 2014 217.34075 134.19762 132.14386 134.06715 155.36087 169.17447 257.90890
## 2015 225.20435 148.79180 150.23648 155.99359 198.52204 155.73375 344.03634
## 2016 267.11234 167.12869 164.52355 155.40270 228.48632 117.53297 374.93733
## 2017 303.16992 174.39610 182.77855 212.88260 203.91175 325.95245 317.29166
## 2018 267.87124 185.61519 197.76754 221.06560 193.27183 420.23684 277.49234
## 2019 253.33575 188.03457 204.11516 227.49945 127.21635 439.20383 288.35395
## 2020 246.90281 175.24269 133.33792   0.00000   0.00000   0.00000  49.80964
## 2021 113.07962  75.61746 114.82160 101.91663 215.86570 155.82362  18.33030
## 2022 198.91204 147.14958 150.64528  65.06151 339.74991 212.73693 203.63202
## 2023 195.15891 151.67070 144.69278 230.61223 208.94497 192.84968 193.53553
##            Aug       Sep       Oct       Nov       Dec
## 2014 240.63458 126.39225 146.19850 149.42557 215.20920
## 2015 184.77283 157.26093 159.95312 146.40014 231.97629
## 2016 163.41971 175.79534 175.90054 161.95370 257.11282
## 2017 173.55403 201.52915 166.16558 143.49564 254.81562
## 2018 192.87302 213.07980 182.15378 191.41317 279.45662
## 2019 182.32389 199.24608 200.03750 200.12996 329.69835
## 2020 172.27594 120.11661 133.00376 126.34872 104.52751
## 2021  13.22876  73.59348 129.25169 126.41994 164.37457
## 2022 140.72669 141.03900 135.94852 123.77399 195.15891
## 2023 116.04740 147.22092 144.28444 128.54571 218.48799
#Plot the time series
plot.ts(datats, xlab = "Tahun", ylab = "Jumlah Pengunjung")
#Add an axis for the years
axis(1, at = seq(2014, 2023, by = 1), labels = seq(2014, 2023, by = 1))

#Descriptive statistics
stat.desc(datats)
##                         x
## nbr.val        120.000000
## nbr.null         3.000000
## nbr.na           0.000000
## min              0.000000
## max            439.203825
## range          439.203825
## sum          21758.150781
## median         174.819395
## mean           181.317923
## SE.mean          6.946108
## CI.mean.0.95    13.753987
## var           5789.809145
## std.dev         76.090795
## coef.var         0.419654
summary(datats)
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##     0.0   141.0   174.8   181.3   213.6   439.2

ADDITIVE HOLT-WINTERS EXPONENTIAL SMOOTHING FORECASTING MODEL

vis.data <- decompose(datats)
plot(vis.data)

COMPARISON OF MODEL EVALUATION METRICS (RMSE AND MAD) FOR EACH ADDITIVE HOLT-WINTERS EXPONENTIAL SMOOTHING MODEL

The additive Holt-Winters model is used because the series contains zero values (and values very close to zero), which the multiplicative form cannot handle.
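
For reference, the additive Holt-Winters recursions (the form fitted by HoltWinters(..., seasonal = "additive"), with smoothing parameters alpha, beta, gamma and seasonal period m = 12) can be written as

$$
\begin{aligned}
\ell_t &= \alpha\,(y_t - s_{t-m}) + (1-\alpha)(\ell_{t-1} + b_{t-1}),\\
b_t &= \beta\,(\ell_t - \ell_{t-1}) + (1-\beta)\,b_{t-1},\\
s_t &= \gamma\,(y_t - \ell_t) + (1-\gamma)\,s_{t-m},\\
\hat{y}_{t+h} &= \ell_t + h\,b_t + s_{t+h-m}, \qquad h = 1, \dots, m.
\end{aligned}
$$

Because the seasonal term enters as an additive offset rather than a multiplicative factor, observations equal to zero pose no problem, whereas a multiplicative seasonal factor breaks down on zero or near-zero values.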

#Candidate additive Holt-Winters exponential smoothing models
#Function to compute RMSE
calculate_rmse <- function(actual, predicted) {
  rmse <- sqrt(mean((actual - predicted)^2))
  return(rmse)
}

#Function to compute MAD
calculate_mad <- function(actual, predicted) {
  mad <- mean(abs(mean(actual) - predicted))
  return(mad)
}
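
In formula form, with $y_t$ the (transformed) actual series and $\hat{y}_t$ the fitted values, aligned on the $n$ time points for which fitted values exist, the two helpers compute

$$
\mathrm{RMSE} = \sqrt{\frac{1}{n}\sum_{t=1}^{n}\left(y_t - \hat{y}_t\right)^2},
\qquad
\mathrm{MAD} = \frac{1}{n}\sum_{t=1}^{n}\left|\bar{y} - \hat{y}_t\right|,
$$

where $\bar{y}$ is the mean of the actual series; as coded, the MAD helper measures how far the predictions deviate from that overall mean rather than from the individual observations.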

#Create vectors of alpha, beta, and gamma values to test
alpha_values <- seq(0.1, 1, by = 0.1)
beta_values <- seq(0.1, 1, by = 0.1)
gamma_values <- seq(0.1, 1, by = 0.1)

#Create a data frame of the parameter combinations to explore
parameter_combinations <- expand.grid(alpha = alpha_values, beta = beta_values, gamma = gamma_values)

#Initialize vectors to store the model evaluation results
rmse_values <- numeric(nrow(parameter_combinations))
mad_values <- numeric(nrow(parameter_combinations))

#Actual data
actual_data <- datats

#Loop over each parameter combination
for (i in seq_len(nrow(parameter_combinations))) {
  current_params <- parameter_combinations[i, ]
  
  #Fit a Holt-Winters model with the current parameters
  current_model <- HoltWinters(datats,
                               alpha = current_params$alpha,
                               beta = current_params$beta,
                               gamma = current_params$gamma,
                               seasonal = "additive")
  
  #Obtain the in-sample predictions (fitted values)
  predicted_data <- fitted(current_model)[,1]
  
  #Evaluate the model using RMSE
  rmse_values[i] <- calculate_rmse(datats, predicted_data)
  
  #Evaluate the model using MAD
  mad_values[i] <- calculate_mad(datats, predicted_data)
}

#Combine the parameter grid with the RMSE and MAD values
results <- cbind(parameter_combinations, RMSE = rmse_values, MAD = mad_values)
results
##      alpha beta gamma      RMSE       MAD
## 1      0.1  0.1   0.1  75.17761  54.58271
## 2      0.2  0.1   0.1  73.26560  53.48102
## 3      0.3  0.1   0.1  74.02178  54.95973
## 4      0.4  0.1   0.1  75.46913  56.58491
## 5      0.5  0.1   0.1  77.45235  58.58377
## 6      0.6  0.1   0.1  80.04941  60.81435
## 7      0.7  0.1   0.1  83.36977  63.28968
## 8      0.8  0.1   0.1  87.53811  65.74034
## 9      0.9  0.1   0.1  92.69210  68.18868
## 10     1.0  0.1   0.1  99.00214  71.29286
## 11     0.1  0.2   0.1  76.39494  55.69816
## 12     0.2  0.2   0.1  74.98170  54.31915
## 13     0.3  0.2   0.1  76.35238  55.97396
## 14     0.4  0.2   0.1  77.92148  57.76655
## 15     0.5  0.2   0.1  79.99400  60.15916
## 16     0.6  0.2   0.1  82.76701  62.81181
## 17     0.7  0.2   0.1  86.38549  65.54342
## 18     0.8  0.2   0.1  91.00940  68.02892
## 19     0.9  0.2   0.1  96.80892  70.63449
## 20     1.0  0.2   0.1 103.99346  74.15299
## 21     0.1  0.3   0.1  76.67105  55.98796
## 22     0.2  0.3   0.1  77.23708  54.90750
## 23     0.3  0.3   0.1  78.98123  57.19151
## 24     0.4  0.3   0.1  80.37410  59.40439
## 25     0.5  0.3   0.1  82.47486  61.98294
## 26     0.6  0.3   0.1  85.43067  65.05119
## 27     0.7  0.3   0.1  89.37969  67.90228
## 28     0.8  0.3   0.1  94.52040  70.28062
## 29     0.9  0.3   0.1 101.05800  72.98724
## 30     1.0  0.3   0.1 109.24642  77.03412
## 31     0.1  0.4   0.1  77.16081  54.90267
## 32     0.2  0.4   0.1  80.20424  56.37940
## 33     0.3  0.4   0.1  81.57279  58.90431
## 34     0.4  0.4   0.1  82.63149  61.48700
## 35     0.5  0.4   0.1  84.84168  64.04856
## 36     0.6  0.4   0.1  88.02725  67.38970
## 37     0.7  0.4   0.1  92.35418  70.21934
## 38     0.8  0.4   0.1  98.08684  72.57426
## 39     0.9  0.4   0.1 105.47017  75.45472
## 40     1.0  0.4   0.1 114.81036  80.10395
## 41     0.1  0.5   0.1  78.20405  53.77808
## 42     0.2  0.5   0.1  83.70768  58.76919
## 43     0.3  0.5   0.1  83.73490  61.69003
## 44     0.4  0.5   0.1  84.69852  63.42619
## 45     0.5  0.5   0.1  87.14159  66.32913
## 46     0.6  0.5   0.1  90.58153  69.88586
## 47     0.7  0.5   0.1  95.32784  72.63360
## 48     0.8  0.5   0.1 101.73596  74.67819
## 49     0.9  0.5   0.1 110.08591  77.91491
## 50     1.0  0.5   0.1 120.74454  83.49360
## 51     0.1  0.6   0.1  79.76652  53.54777
## 52     0.2  0.6   0.1  87.26305  60.92373
## 53     0.3  0.6   0.1  85.38741  64.02352
## 54     0.4  0.6   0.1  86.71485  65.27133
## 55     0.5  0.6   0.1  89.42322  68.82671
## 56     0.6  0.6   0.1  93.10305  72.63561
## 57     0.7  0.6   0.1  98.31247  74.91551
## 58     0.8  0.6   0.1 105.49582  76.86426
## 59     0.9  0.6   0.1 114.94953  80.67400
## 60     1.0  0.6   0.1 127.11477  87.28601
## 61     0.1  0.7   0.1  81.97736  54.38865
## 62     0.2  0.7   0.1  90.14209  63.64713
## 63     0.3  0.7   0.1  86.80857  65.77427
## 64     0.4  0.7   0.1  88.78037  67.41447
## 65     0.5  0.7   0.1  91.69200  71.51229
## 66     0.6  0.7   0.1  95.57967  75.24894
## 67     0.7  0.7   0.1 101.31920  76.95133
## 68     0.8  0.7   0.1 109.40042  79.18679
## 69     0.9  0.7   0.1 120.11070  83.15751
## 70     1.0  0.7   0.1 133.99458  92.24392
## 71     0.1  0.8   0.1  85.01975  55.87772
## 72     0.2  0.8   0.1  91.74578  66.58534
## 73     0.3  0.8   0.1  88.31714  67.10195
## 74     0.4  0.8   0.1  90.91901  69.61167
## 75     0.5  0.8   0.1  93.91779  74.28773
## 76     0.6  0.8   0.1  97.99524  77.60641
## 77     0.7  0.8   0.1 104.37089  79.04755
## 78     0.8  0.8   0.1 113.49278  81.34623
## 79     0.9  0.8   0.1 125.62328  85.77113
## 80     1.0  0.8   0.1 141.46670  97.50899
## 81     0.1  0.9   0.1  89.09226  59.13850
## 82     0.2  0.9   0.1  92.06801  68.87260
## 83     0.3  0.9   0.1  90.03863  68.00336
## 84     0.4  0.9   0.1  93.11633  71.82634
## 85     0.5  0.9   0.1  96.05109  77.11039
## 86     0.6  0.9   0.1 100.34876  79.42551
## 87     0.7  0.9   0.1 107.50611  80.88356
## 88     0.8  0.9   0.1 117.82133  83.49175
## 89     0.9  0.9   0.1 131.54195  89.03611
## 90     1.0  0.9   0.1 149.62744 103.18043
## 91     0.1  1.0   0.1  94.26693  63.76344
## 92     0.2  1.0   0.1  91.71786  70.44204
## 93     0.3  1.0   0.1  91.95151  69.43890
## 94     0.4  1.0   0.1  95.33399  74.34351
## 95     0.5  1.0   0.1  98.04245  79.57944
## 96     0.6  1.0   0.1 102.66472  81.08391
## 97     0.7  1.0   0.1 110.77382  82.57476
## 98     0.8  1.0   0.1 122.43355  85.46969
## 99     0.9  1.0   0.1 137.92077  93.03219
## 100    1.0  1.0   0.1 158.59754 109.69688
## 101    0.1  0.1   0.2  73.92294  54.37081
## 102    0.2  0.1   0.2  72.04302  52.33925
## 103    0.3  0.1   0.2  72.42395  53.81869
## 104    0.4  0.1   0.2  73.37033  55.52738
## 105    0.5  0.1   0.2  74.91818  57.29973
## 106    0.6  0.1   0.2  77.21814  59.56861
## 107    0.7  0.1   0.2  80.46444  62.15317
## 108    0.8  0.1   0.2  84.92056  64.82101
## 109    0.9  0.1   0.2  90.93021  67.65509
## 110    1.0  0.1   0.2  99.00214  71.29286
## 111    0.1  0.2   0.2  75.21183  55.04151
## 112    0.2  0.2   0.2  74.12033  53.76164
## 113    0.3  0.2   0.2  75.01386  55.45982
## 114    0.4  0.2   0.2  75.88802  57.20371
## 115    0.5  0.2   0.2  77.40837  58.99736
## 116    0.6  0.2   0.2  79.80289  61.48179
## 117    0.7  0.2   0.2  83.28506  64.33781
## 118    0.8  0.2   0.2  88.17591  67.06562
## 119    0.9  0.2   0.2  94.87980  70.00268
## 120    1.0  0.2   0.2 103.99346  74.15299
## 121    0.1  0.3   0.2  75.76667  55.98871
## 122    0.2  0.3   0.2  76.93999  55.10523
## 123    0.3  0.3   0.2  77.93676  57.20036
## 124    0.4  0.3   0.2  78.34773  59.30127
## 125    0.5  0.3   0.2  79.80703  60.94245
## 126    0.6  0.3   0.2  82.31041  63.71736
## 127    0.7  0.3   0.2  86.05517  66.56025
## 128    0.8  0.3   0.2  91.44387  69.31853
## 129    0.9  0.3   0.2  98.94390  72.28301
## 130    1.0  0.3   0.2 109.24642  77.03412
## 131    0.1  0.4   0.2  76.65368  54.88221
## 132    0.2  0.4   0.2  80.72299  57.42582
## 133    0.3  0.4   0.2  80.70817  59.54215
## 134    0.4  0.4   0.2  80.52274  61.53704
## 135    0.5  0.4   0.2  82.08013  62.98674
## 136    0.6  0.4   0.2  84.73038  65.94678
## 137    0.7  0.4   0.2  88.77345  68.77596
## 138    0.8  0.4   0.2  94.74049  71.49696
## 139    0.9  0.4   0.2 103.15490  74.57693
## 140    1.0  0.4   0.2 114.81036  80.10395
## 141    0.1  0.5   0.2  78.14661  53.80855
## 142    0.2  0.5   0.2  85.27672  60.24691
## 143    0.3  0.5   0.2  82.76317  62.46881
## 144    0.4  0.5   0.2  82.48732  63.49920
## 145    0.5  0.5   0.2  84.30198  65.04821
## 146    0.6  0.5   0.2  87.08193  68.22095
## 147    0.7  0.5   0.2  91.45554  71.05704
## 148    0.8  0.5   0.2  98.09635  73.52146
## 149    0.9  0.5   0.2 107.55618  76.87495
## 150    1.0  0.5   0.2 120.74454  83.49360
## 151    0.1  0.6   0.2  80.33546  53.26683
## 152    0.2  0.6   0.2  89.89864  63.13986
## 153    0.3  0.6   0.2  84.03230  64.63230
## 154    0.4  0.6   0.2  84.48499  65.00362
## 155    0.5  0.6   0.2  86.51527  67.43743
## 156    0.6  0.6   0.2  89.36132  70.64907
## 157    0.7  0.6   0.2  94.11375  73.08296
## 158    0.8  0.6   0.2 101.54606  75.40518
## 159    0.9  0.6   0.2 112.19542  79.40513
## 160    1.0  0.6   0.2 127.11477  87.28601
## 161    0.1  0.7   0.2  83.48322  54.38636
## 162    0.2  0.7   0.2  93.41927  67.29939
## 163    0.3  0.7   0.2  85.04137  66.15463
## 164    0.4  0.7   0.2  86.64089  66.73812
## 165    0.5  0.7   0.2  88.69779  69.85790
## 166    0.6  0.7   0.2  91.54632  73.00530
## 167    0.7  0.7   0.2  96.76606  74.90888
## 168    0.8  0.7   0.2 105.13214  77.53558
## 169    0.9  0.7   0.2 117.12541  81.70326
## 170    1.0  0.7   0.2 133.99458  92.24392
## 171    0.1  0.8   0.2  87.93168  57.58854
## 172    0.2  0.8   0.2  94.81121  71.06063
## 173    0.3  0.8   0.2  86.38425  67.28856
## 174    0.4  0.8   0.2  88.92919  68.79206
## 175    0.5  0.8   0.2  90.79627  72.35329
## 176    0.6  0.8   0.2  93.61758  75.08631
## 177    0.7  0.8   0.2  99.44820  76.66333
## 178    0.8  0.8   0.2 108.90594  79.59852
## 179    0.9  0.8   0.2 122.40267  84.08024
## 180    1.0  0.8   0.2 141.46670  97.50899
## 181    0.1  0.9   0.2  94.00743  62.81656
## 182    0.2  0.9   0.2  94.00920  73.22041
## 183    0.3  0.9   0.2  88.27412  68.39849
## 184    0.4  0.9   0.2  91.28296  70.92329
## 185    0.5  0.9   0.2  92.74580  74.78151
## 186    0.6  0.9   0.2  95.57908  76.63700
## 187    0.7  0.9   0.2 102.21511  78.82918
## 188    0.8  0.9   0.2 112.92243  81.65941
## 189    0.9  0.9   0.2 128.08439  86.70441
## 190    1.0  0.9   0.2 149.62744 103.18043
## 191    0.1  1.0   0.2 101.82016  69.59944
## 192    0.2  1.0   0.2  91.93415  73.14815
## 193    0.3  1.0   0.2  90.58535  69.91976
## 194    0.4  1.0   0.2  93.64113  73.49997
## 195    0.5  1.0   0.2  94.48249  76.82985
## 196    0.6  1.0   0.2  97.47125  78.33146
## 197    0.7  1.0   0.2 105.13111  80.74912
## 198    0.8  1.0   0.2 117.23404  83.79753
## 199    0.9  1.0   0.2 134.22742  89.89233
## 200    1.0  1.0   0.2 158.59754 109.69688
## 201    0.1  0.1   0.3  73.84443  54.78227
## 202    0.2  0.1   0.3  72.22643  52.63551
## 203    0.3  0.1   0.3  72.32158  53.91915
## 204    0.4  0.1   0.3  72.75590  55.39606
## 205    0.5  0.1   0.3  73.80370  57.08008
## 206    0.6  0.1   0.3  75.66507  58.61642
## 207    0.7  0.1   0.3  78.58223  61.19588
## 208    0.8  0.1   0.3  82.95817  64.04422
## 209    0.9  0.1   0.3  89.40995  67.14219
## 210    1.0  0.1   0.3  99.00214  71.29286
## 211    0.1  0.2   0.3  75.26144  55.63640
## 212    0.2  0.2   0.3  74.80835  54.52555
## 213    0.3  0.2   0.3  75.32068  56.00232
## 214    0.4  0.2   0.3  75.44177  57.42454
## 215    0.5  0.2   0.3  76.33836  58.95900
## 216    0.6  0.2   0.3  78.22323  60.58705
## 217    0.7  0.2   0.3  81.30834  63.32258
## 218    0.8  0.2   0.3  86.07071  66.25959
## 219    0.9  0.2   0.3  93.22351  69.41096
## 220    1.0  0.2   0.3 103.99346  74.15299
## 221    0.1  0.3   0.3  76.17433  56.40632
## 222    0.2  0.3   0.3  78.42101  56.30721
## 223    0.3  0.3   0.3  78.72671  57.91301
## 224    0.4  0.3   0.3  77.99338  59.90385
## 225    0.5  0.3   0.3  78.74978  61.07242
## 226    0.6  0.3   0.3  80.69540  62.85725
## 227    0.7  0.3   0.3  83.96888  65.51439
## 228    0.8  0.3   0.3  89.17757  68.43419
## 229    0.9  0.3   0.3  97.13798  71.61668
## 230    1.0  0.3   0.3 109.24642  77.03412
## 231    0.1  0.4   0.3  77.52653  55.12440
## 232    0.2  0.4   0.3  83.39793  59.28706
## 233    0.3  0.4   0.3  81.86180  61.00781
## 234    0.4  0.4   0.3  80.13283  62.07579
## 235    0.5  0.4   0.3  81.03723  63.06304
## 236    0.6  0.4   0.3  83.08337  65.05274
## 237    0.7  0.4   0.3  86.56060  67.65064
## 238    0.8  0.4   0.3  92.29467  70.50167
## 239    0.9  0.4   0.3 101.18768  73.76126
## 240    1.0  0.4   0.3 114.81036  80.10395
## 241    0.1  0.5   0.3  79.59318  54.22604
## 242    0.2  0.5   0.3  89.58350  63.13030
## 243    0.3  0.5   0.3  83.90173  64.20701
## 244    0.4  0.5   0.3  82.02231  63.96727
## 245    0.5  0.5   0.3  83.32642  64.93204
## 246    0.6  0.5   0.3  85.40473  67.24333
## 247    0.7  0.5   0.3  89.09349  69.78315
## 248    0.8  0.5   0.3  95.45486  72.47201
## 249    0.9  0.5   0.3 105.41938  75.95685
## 250    1.0  0.5   0.3 120.74454  83.49360
## 251    0.1  0.6   0.3  82.63868  54.61207
## 252    0.2  0.6   0.3  96.05212  67.53130
## 253    0.3  0.6   0.3  84.73123  66.29782
## 254    0.4  0.6   0.3  84.06456  65.53750
## 255    0.5  0.6   0.3  85.66655  67.27007
## 256    0.6  0.6   0.3  87.63858  69.32978
## 257    0.7  0.6   0.3  91.57581  71.71512
## 258    0.8  0.6   0.3  98.69863  74.21288
## 259    0.9  0.6   0.3 109.88493  78.36713
## 260    1.0  0.6   0.3 127.11477  87.28601
## 261    0.1  0.7   0.3  87.12054  58.03631
## 262    0.2  0.7   0.3 101.05227  73.34851
## 263    0.3  0.7   0.3  85.14499  66.79937
## 264    0.4  0.7   0.3  86.45555  67.29805
## 265    0.5  0.7   0.3  88.00336  69.68934
## 266    0.6  0.7   0.3  89.74544  71.51459
## 267    0.7  0.7   0.3  94.02793  73.54017
## 268    0.8  0.7   0.3 102.07745  76.29682
## 269    0.9  0.7   0.3 114.64133  80.62394
## 270    1.0  0.7   0.3 133.99458  92.24392
## 271    0.1  0.8   0.3  93.60500  63.53316
## 272    0.2  0.8   0.3 102.94086  77.93026
## 273    0.3  0.8   0.3  86.13570  67.60834
## 274    0.4  0.8   0.3  89.12220  69.26472
## 275    0.5  0.8   0.3  90.25288  72.12332
## 276    0.6  0.8   0.3  91.69489  73.53000
## 277    0.7  0.8   0.3  96.49539  75.45662
## 278    0.8  0.8   0.3 105.65258  78.66446
## 279    0.9  0.8   0.3 119.74893  83.03659
## 280    1.0  0.8   0.3 141.46670  97.50899
## 281    0.1  0.9   0.3 102.59533  70.37473
## 282    0.2  0.9   0.3 101.52239  80.68955
## 283    0.3  0.9   0.3  88.14933  68.79683
## 284    0.4  0.9   0.3  91.92244  71.77058
## 285    0.5  0.9   0.3  92.33268  74.21392
## 286    0.6  0.9   0.3  93.48603  75.36697
## 287    0.7  0.9   0.3  99.04958  77.68390
## 288    0.8  0.9   0.3 109.48775  80.92627
## 289    0.9  0.9   0.3 125.26853  85.76743
## 290    1.0  0.9   0.3 149.62744 103.18043
## 291    0.1  1.0   0.3 114.23553  79.44582
## 292    0.2  1.0   0.3  97.83777  79.23241
## 293    0.3  1.0   0.3  90.99947  70.41609
## 294    0.4  1.0   0.3  94.75850  74.63522
## 295    0.5  1.0   0.3  94.16402  76.09926
## 296    0.6  1.0   0.3  95.16407  77.26660
## 297    0.7  1.0   0.3 101.77402  79.93391
## 298    0.8  1.0   0.3 113.64216  83.21823
## 299    0.9  1.0   0.3 131.26078  88.52740
## 300    1.0  1.0   0.3 158.59754 109.69688
## 301    0.1  0.1   0.4  74.32134  55.24714
## 302    0.2  0.1   0.4  73.13292  53.30572
## 303    0.3  0.1   0.4  73.02622  54.70896
## 304    0.4  0.1   0.4  72.95567  55.55071
## 305    0.5  0.1   0.4  73.49938  57.12157
## 306    0.6  0.1   0.4  74.89976  58.37990
## 307    0.7  0.1   0.4  77.39739  60.42275
## 308    0.8  0.1   0.4  81.49645  63.38035
## 309    0.9  0.1   0.4  88.09935  66.64859
## 310    1.0  0.1   0.4  99.00214  71.29286
## 311    0.1  0.2   0.4  75.91603  56.70728
## 312    0.2  0.2   0.4  76.35287  55.82922
## 313    0.3  0.2   0.4  76.58335  56.68002
## 314    0.4  0.2   0.4  75.89048  57.93269
## 315    0.5  0.2   0.4  76.13203  59.25195
## 316    0.6  0.2   0.4  77.49086  60.41108
## 317    0.7  0.2   0.4  80.09222  62.54215
## 318    0.8  0.2   0.4  84.51821  65.56191
## 319    0.9  0.2   0.4  91.80265  68.87748
## 320    1.0  0.2   0.4 103.99346  74.15299
## 321    0.1  0.3   0.4  77.23789  56.84917
## 322    0.2  0.3   0.4  81.00748  58.53769
## 323    0.3  0.3   0.4  80.69535  59.20078
## 324    0.4  0.3   0.4  78.60125  60.67619
## 325    0.5  0.3   0.4  78.60167  61.49913
## 326    0.6  0.3   0.4  79.99465  62.67727
## 327    0.7  0.3   0.4  82.71564  64.72793
## 328    0.8  0.3   0.4  87.52178  67.64460
## 329    0.9  0.3   0.4  95.59636  71.01260
## 330    1.0  0.3   0.4 109.24642  77.03412
## 331    0.1  0.4   0.4  79.11336  55.81525
## 332    0.2  0.4   0.4  87.63462  62.97476
## 333    0.3  0.4   0.4  84.45220  63.27320
## 334    0.4  0.4   0.4  80.72821  62.83476
## 335    0.5  0.4   0.4  80.95064  63.52427
## 336    0.6  0.4   0.4  82.43446  64.89588
## 337    0.7  0.4   0.4  85.26423  66.85199
## 338    0.8  0.4   0.4  90.52229  69.65417
## 339    0.9  0.4   0.4  99.51681  73.12085
## 340    1.0  0.4   0.4 114.81036  80.10395
## 341    0.1  0.5   0.4  81.90033  55.47489
## 342    0.2  0.5   0.4  96.20969  68.67361
## 343    0.3  0.5   0.4  86.67191  66.93532
## 344    0.4  0.5   0.4  82.52574  64.64873
## 345    0.5  0.5   0.4  83.38057  65.47422
## 346    0.6  0.5   0.4  84.83255  66.96639
## 347    0.7  0.5   0.4  87.74050  68.83941
## 348    0.8  0.5   0.4  93.55328  71.61638
## 349    0.9  0.5   0.4 103.61430  75.20997
## 350    1.0  0.5   0.4 120.74454  83.49360
## 351    0.1  0.6   0.4  86.09703  58.15525
## 352    0.2  0.6   0.4 105.61797  73.99469
## 353    0.3  0.6   0.4  87.11446  68.95833
## 354    0.4  0.6   0.4  84.60711  66.25922
## 355    0.5  0.6   0.4  85.96470  67.83011
## 356    0.6  0.6   0.4  87.14938  69.06401
## 357    0.7  0.6   0.4  90.14461  70.78696
## 358    0.8  0.6   0.4  96.65968  73.56102
## 359    0.9  0.6   0.4 107.94527  77.46434
## 360    1.0  0.6   0.4 127.11477  87.28601
## 361    0.1  0.7   0.4  92.43549  63.98129
## 362    0.2  0.7   0.4 113.35455  81.28263
## 363    0.3  0.7   0.4  86.78872  68.49716
## 364    0.4  0.7   0.4  87.29575  68.02625
## 365    0.5  0.7   0.4  88.61730  70.36043
## 366    0.6  0.7   0.4  89.32341  71.07043
## 367    0.7  0.7   0.4  92.49479  73.05311
## 368    0.8  0.7   0.4  99.90125  75.76553
## 369    0.9  0.7   0.4 112.57157  79.63803
## 370    1.0  0.7   0.4 133.99458  92.24392
## 371    0.1  0.8   0.4 101.80049  71.80801
## 372    0.2  0.8   0.4 116.92691  88.55583
## 373    0.3  0.8   0.4  87.15954  68.65804
## 374    0.4  0.8   0.4  90.48756  70.40818
## 375    0.5  0.8   0.4  91.21860  73.02256
## 376    0.6  0.8   0.4  91.30826  73.14943
## 377    0.7  0.8   0.4  94.84265  75.12754
## 378    0.8  0.8   0.4 103.34960  78.30583
## 379    0.9  0.8   0.4 117.55804  82.29242
## 380    1.0  0.8   0.4 141.46670  97.50899
## 381    0.1  0.9   0.4 114.91590  81.09591
## 382    0.2  0.9   0.4 116.10519  92.09340
## 383    0.3  0.9   0.4  89.11561  69.24314
## 384    0.4  0.9   0.4  93.95352  73.22931
## 385    0.5  0.9   0.4  93.66843  75.24823
## 386    0.6  0.9   0.4  93.09188  75.20965
## 387    0.7  0.9   0.4  97.27434  77.28172
## 388    0.8  0.9   0.4 107.07838  80.64901
## 389    0.9  0.9   0.4 122.96975  85.02944
## 390    1.0  0.9   0.4 149.62744 103.18043
## 391    0.1  1.0   0.4 131.84727  93.11124
## 392    0.2  1.0   0.4 112.06667  90.57259
## 393    0.3  1.0   0.4  92.50311  70.89561
## 394    0.4  1.0   0.4  97.54821  76.07443
## 395    0.5  1.0   0.4  95.87822  77.60191
## 396    0.6  1.0   0.4  94.71442  77.23824
## 397    0.7  1.0   0.4  99.89472  79.37286
## 398    0.8  1.0   0.4 111.15479  83.12583
## 399    0.9  1.0   0.4 128.87157  87.87326
## 400    1.0  1.0   0.4 158.59754 109.69688
## 401    0.1  0.1   0.5  75.13370  55.97810
## 402    0.2  0.1   0.5  74.50298  54.67068
## 403    0.3  0.1   0.5  74.25890  55.74207
## 404    0.4  0.1   0.5  73.66700  55.96219
## 405    0.5  0.1   0.5  73.69052  57.36527
## 406    0.6  0.1   0.5  74.63240  58.52211
## 407    0.7  0.1   0.5  76.69015  59.89304
## 408    0.8  0.1   0.5  80.41658  62.77802
## 409    0.9  0.1   0.5  86.97038  66.17845
## 410    1.0  0.1   0.5  99.00214  71.29286
## 411    0.1  0.2   0.5  76.95374  57.70781
## 412    0.2  0.2   0.5  78.50480  57.31666
## 413    0.3  0.2   0.5  78.54649  58.15691
## 414    0.4  0.2   0.5  76.93817  58.69239
## 415    0.5  0.2   0.5  76.46141  59.76216
## 416    0.6  0.2   0.5  77.29330  60.68420
## 417    0.7  0.2   0.5  79.39476  62.09152
## 418    0.8  0.2   0.5  83.38476  64.92038
## 419    0.9  0.2   0.5  90.58464  68.39372
## 420    1.0  0.2   0.5 103.99346  74.15299
## 421    0.1  0.3   0.5  78.72455  57.92627
## 422    0.2  0.3   0.5  84.49361  61.47270
## 423    0.3  0.3   0.5  83.65173  61.24413
## 424    0.4  0.3   0.5  79.89143  61.59382
## 425    0.5  0.3   0.5  79.01895  62.00291
## 426    0.6  0.3   0.5  79.86960  63.04435
## 427    0.7  0.3   0.5  82.02896  64.31699
## 428    0.8  0.3   0.5  86.32630  66.96376
## 429    0.9  0.3   0.5  94.28109  70.51680
## 430    1.0  0.3   0.5 109.24642  77.03412
## 431    0.1  0.4   0.5  81.20040  57.26067
## 432    0.2  0.4   0.5  93.32407  67.74624
## 433    0.3  0.4   0.5  88.39893  66.02907
## 434    0.4  0.4   0.5  82.04629  63.78713
## 435    0.5  0.4   0.5  81.45453  63.98733
## 436    0.6  0.4   0.5  82.41575  65.27803
## 437    0.7  0.4   0.5  84.59159  66.33951
## 438    0.8  0.4   0.5  89.25494  68.96093
## 439    0.9  0.4   0.5  98.09794  72.57306
## 440    1.0  0.4   0.5 114.81036  80.10395
## 441    0.1  0.5   0.5  84.91245  58.22930
## 442    0.2  0.5   0.5 105.22209  76.02519
## 443    0.3  0.5   0.5  91.13835  70.84727
## 444    0.4  0.5   0.5  83.73821  65.45644
## 445    0.5  0.5   0.5  84.07073  66.13148
## 446    0.6  0.5   0.5  84.96704  67.34655
## 447    0.7  0.5   0.5  87.07703  68.20495
## 448    0.8  0.5   0.5  92.20332  71.05887
## 449    0.9  0.5   0.5 102.08899  74.54218
## 450    1.0  0.5   0.5 120.74454  83.49360
## 451    0.1  0.6   0.5  90.66096  63.50614
## 452    0.2  0.6   0.5 118.95172  83.80507
## 453    0.3  0.6   0.5  91.44074  72.56743
## 454    0.4  0.6   0.5  85.83419  67.24608
## 455    0.5  0.6   0.5  86.98728  68.64987
## 456    0.6  0.6   0.5  87.46646  69.56102
## 457    0.7  0.6   0.5  89.47420  70.61880
## 458    0.8  0.6   0.5  95.21918  73.45408
## 459    0.9  0.6   0.5 106.31539  76.70285
## 460    1.0  0.6   0.5 127.11477  87.28601
## 461    0.1  0.7   0.5  99.56572  71.20338
## 462    0.2  0.7   0.5 131.03722  92.37084
## 463    0.3  0.7   0.5  90.40189  71.67041
## 464    0.4  0.7   0.5  88.85584  69.00469
## 465    0.5  0.7   0.5  90.09306  71.73001
## 466    0.6  0.7   0.5  89.82785  71.80834
## 467    0.7  0.7   0.5  91.79556  72.89049
## 468    0.8  0.7   0.5  98.36963  75.63940
## 469    0.9  0.7   0.5 110.84414  79.14485
## 470    1.0  0.7   0.5 133.99458  92.24392
## 471    0.1  0.8   0.5 112.97501  81.91015
## 472    0.2  0.8   0.5 137.75225 102.17883
## 473    0.3  0.8   0.5  89.88285  70.68465
## 474    0.4  0.8   0.5  92.69639  71.85611
## 475    0.5  0.8   0.5  93.23016  74.51325
## 476    0.6  0.8   0.5  91.98707  73.78503
## 477    0.7  0.8   0.5  94.09544  74.93513
## 478    0.8  0.8   0.5 101.73679  78.03053
## 479    0.9  0.8   0.5 115.74506  81.91121
## 480    1.0  0.8   0.5 141.46670  97.50899
## 481    0.1  0.9   0.5 131.82568  94.72333
## 482    0.2  0.9   0.5 138.99772 107.12928
## 483    0.3  0.9   0.5  91.49019  70.33726
## 484    0.4  0.9   0.5  97.03720  75.24167
## 485    0.5  0.9   0.5  96.28051  77.48942
## 486    0.6  0.9   0.5  93.91923  76.04531
## 487    0.7  0.9   0.5  96.47273  76.83112
## 488    0.8  0.9   0.5 105.40525  80.56087
## 489    0.9  0.9   0.5 121.08786  84.70087
## 490    1.0  0.9   0.5 149.62744 103.18043
## 491    0.1  1.0   0.5 155.77027 109.49939
## 492    0.2  1.0   0.5 136.64599 107.41079
## 493    0.3  1.0   0.5  95.31555  71.68298
## 494    0.4  1.0   0.5 101.68036  78.64780
## 495    0.5  1.0   0.5  99.15095  80.52751
## 496    0.6  1.0   0.5  95.65382  78.09402
## 497    0.7  1.0   0.5  99.05523  78.75414
## 498    0.8  1.0   0.5 109.45170  83.27795
## 499    0.9  1.0   0.5 126.94197  87.52537
## 500    1.0  1.0   0.5 158.59754 109.69688
## 501    0.1  0.1   0.6  76.21598  56.97721
## 502    0.2  0.1   0.6  76.25043  55.86827
## 503    0.3  0.1   0.6  75.92030  56.83767
## 504    0.4  0.1   0.6  74.76761  56.95814
## 505    0.5  0.1   0.6  74.22604  57.64923
## 506    0.6  0.1   0.6  74.69976  58.74778
## 507    0.7  0.1   0.6  76.31685  59.79856
## 508    0.8  0.1   0.6  79.62881  62.22787
## 509    0.9  0.1   0.6  85.99854  65.74385
## 510    1.0  0.1   0.6  99.00214  71.29286
## 511    0.1  0.2   0.6  78.30764  58.85168
## 512    0.2  0.2   0.6  81.19786  59.59476
## 513    0.3  0.2   0.6  81.14260  60.02224
## 514    0.4  0.2   0.6  78.48098  59.68670
## 515    0.5  0.2   0.6  77.17609  60.26795
## 516    0.6  0.2   0.6  77.45820  60.99996
## 517    0.7  0.2   0.6  79.06003  62.00179
## 518    0.8  0.2   0.6  82.57056  64.32944
## 519    0.9  0.2   0.6  89.54122  67.98351
## 520    1.0  0.2   0.6 103.99346  74.15299
## 521    0.1  0.3   0.6  80.56971  58.72818
## 522    0.2  0.3   0.6  88.86753  65.19160
## 523    0.3  0.3   0.6  87.60439  64.32359
## 524    0.4  0.3   0.6  81.79021  62.77410
## 525    0.5  0.3   0.6  79.85182  62.57254
## 526    0.6  0.3   0.6  80.13720  63.46415
## 527    0.7  0.3   0.6  81.73961  64.22088
## 528    0.8  0.3   0.6  85.48040  66.39166
## 529    0.9  0.3   0.6  93.15954  70.06907
## 530    1.0  0.3   0.6 109.24642  77.03412
## 531    0.1  0.4   0.6  83.76315  58.85534
## 532    0.2  0.4   0.6 100.54627  73.13521
## 533    0.3  0.4   0.6  93.83230  69.80580
## 534    0.4  0.4   0.6  84.05076  65.09251
## 535    0.5  0.4   0.6  82.39592  64.44839
## 536    0.6  0.4   0.6  82.83220  65.79856
## 537    0.7  0.4   0.6  84.35989  66.32546
## 538    0.8  0.4   0.6  88.37032  68.50954
## 539    0.9  0.4   0.6  96.89337  72.05079
## 540    1.0  0.4   0.6 114.81036  80.10395
## 541    0.1  0.5   0.6  88.68096  62.86529
## 542    0.2  0.5   0.6 116.82308  84.44125
## 543    0.3  0.5   0.6  97.57230  75.67849
## 544    0.4  0.5   0.6  85.65452  66.73447
## 545    0.5  0.5   0.6  85.23694  66.79788
## 546    0.6  0.5   0.6  85.60191  68.11788
## 547    0.7  0.5   0.6  86.90760  68.49778
## 548    0.8  0.5   0.6  91.27071  70.97110
## 549    0.9  0.5   0.6 100.79976  73.97327
## 550    1.0  0.5   0.6 120.74454  83.49360
## 551    0.1  0.6   0.6  96.50650  69.58410
## 552    0.2  0.6   0.6 136.44717  95.75725
## 553    0.3  0.6   0.6  98.19524  77.67312
## 554    0.4  0.6   0.6  87.74743  68.16424
## 555    0.5  0.6   0.6  88.56824  69.73542
## 556    0.6  0.6   0.6  88.37545  70.63938
## 557    0.7  0.6   0.6  89.35844  70.75599
## 558    0.8  0.6   0.6  94.23064  73.31547
## 559    0.9  0.6   0.6 104.94455  76.22179
## 560    1.0  0.6   0.6 127.11477  87.28601
## 561    0.1  0.7   0.6 108.93400  79.26400
## 562    0.2  0.7   0.6 154.75946 106.98377
## 563    0.3  0.7   0.6  96.76312  76.82026
## 564    0.4  0.7   0.6  91.13606  70.11510
## 565    0.5  0.7   0.6  92.26369  73.16665
## 566    0.6  0.7   0.6  91.04146  73.10940
## 567    0.7  0.7   0.6  91.71669  72.92774
## 568    0.8  0.7   0.6  97.32382  75.52749
## 569    0.9  0.7   0.6 109.40006  78.93817
## 570    1.0  0.7   0.6 133.99458  92.24392
## 571    0.1  0.8   0.6 127.99205  93.16672
## 572    0.2  0.8   0.6 166.20303 119.88856
## 573    0.3  0.8   0.6  95.20869  75.46255
## 574    0.4  0.8   0.6  95.75234  73.52112
## 575    0.5  0.8   0.6  96.12722  76.77205
## 576    0.6  0.8   0.6  93.51864  75.20132
## 577    0.7  0.8   0.6  94.03684  74.79191
## 578    0.8  0.8   0.6 100.64311  77.81156
## 579    0.9  0.8   0.6 114.24139  81.93423
## 580    1.0  0.8   0.6 141.46670  97.50899
## 581    0.1  0.9   0.6 154.68809 110.68684
## 582    0.2  0.9   0.6 170.73462 125.89173
## 583    0.3  0.9   0.6  96.08697  74.17444
## 584    0.4  0.9   0.6 101.18544  77.70090
## 585    0.5  0.9   0.6 100.02056  80.37646
## 586    0.6  0.9   0.6  95.77220  77.63003
## 587    0.7  0.9   0.6  96.42836  76.53888
## 588    0.8  0.9   0.6 104.28560  80.32977
## 589    0.9  0.9   0.6 119.54327  84.70808
## 590    1.0  0.9   0.6 149.62744 103.18043
## 591    0.1  1.0   0.6 187.61052 129.33702
## 592    0.2  1.0   0.6 171.93215 128.54806
## 593    0.3  1.0   0.6 100.16694  75.49854
## 594    0.4  1.0   0.6 107.18208  82.60167
## 595    0.5  1.0   0.6 103.84639  84.20557
## 596    0.6  1.0   0.6  97.82199  79.67973
## 597    0.7  1.0   0.6  99.04282  78.08742
## 598    0.8  1.0   0.6 108.33962  83.11019
## 599    0.9  1.0   0.6 125.38009  87.37481
## 600    1.0  1.0   0.6 158.59754 109.69688
## 601    0.1  0.1   0.7  77.56081  58.17856
## 602    0.2  0.1   0.7  78.36004  57.79290
## 603    0.3  0.1   0.7  77.98501  58.43676
## 604    0.4  0.1   0.7  76.21798  58.16687
## 605    0.5  0.1   0.7  75.04009  58.10110
## 606    0.6  0.1   0.7  75.01424  58.99422
## 607    0.7  0.1   0.7  76.18611  59.94342
## 608    0.8  0.1   0.7  79.06634  61.71923
## 609    0.9  0.1   0.7  85.16259  65.39110
## 610    1.0  0.1   0.7  99.00214  71.29286
## 611    0.1  0.2   0.7  79.96944  59.87285
## 612    0.2  0.2   0.7  84.44549  62.25208
## 613    0.3  0.2   0.7  84.37982  63.00990
## 614    0.4  0.2   0.7  80.50253  61.30206
## 615    0.5  0.2   0.7  78.21689  60.78252
## 616    0.6  0.2   0.7  77.89586  61.37388
## 617    0.7  0.2   0.7  78.99043  62.26843
## 618    0.8  0.2   0.7  82.00222  63.81734
## 619    0.9  0.2   0.7  88.64810  67.59940
## 620    1.0  0.2   0.7 103.99346  74.15299
## 621    0.1  0.3   0.7  82.77996  59.71925
## 622    0.2  0.3   0.7  94.20836  69.60410
## 623    0.3  0.3   0.7  92.64071  68.66009
## 624    0.4  0.3   0.7  84.31632  64.44502
## 625    0.5  0.3   0.7  81.04936  63.12360
## 626    0.6  0.3   0.7  80.70504  63.90701
## 627    0.7  0.3   0.7  81.74353  64.50763
## 628    0.8  0.3   0.7  84.90377  66.06496
## 629    0.9  0.3   0.7  92.20388  69.64060
## 630    1.0  0.3   0.7 109.24642  77.03412
## 631    0.1  0.4   0.7  86.85878  62.19932
## 632    0.2  0.4   0.7 109.45920  80.01383
## 633    0.3  0.4   0.7 100.96278  75.01583
## 634    0.4  0.4   0.7  86.80653  66.70932
## 635    0.5  0.4   0.7  83.72995  65.12730
## 636    0.6  0.4   0.7  83.58816  66.30617
## 637    0.7  0.4   0.7  84.45902  66.64802
## 638    0.8  0.4   0.7  87.78116  68.36273
## 639    0.9  0.4   0.7  95.87122  71.57963
## 640    1.0  0.4   0.7 114.81036  80.10395
## 641    0.1  0.5   0.7  93.33250  67.61211
## 642    0.2  0.5   0.7 131.20721  94.32541
## 643    0.3  0.5   0.7 106.29500  81.50678
## 644    0.4  0.5   0.7  88.39143  68.35754
## 645    0.5  0.5   0.7  86.83707  67.61260
## 646    0.6  0.5   0.7  86.63958  69.06073
## 647    0.7  0.5   0.7  87.11755  68.99041
## 648    0.8  0.5   0.7  90.66154  70.88244
## 649    0.9  0.5   0.7  99.71012  73.55699
## 650    1.0  0.5   0.7 120.74454  83.49360
## 651    0.1  0.6   0.7 103.88728  76.01450
## 652    0.2  0.6   0.7 158.34828 109.62965
## 653    0.3  0.6   0.7 107.85325  84.28972
## 654    0.4  0.6   0.7  90.49066  70.10198
## 655    0.5  0.6   0.7  90.66858  70.73639
## 656    0.6  0.6   0.7  89.78036  72.08192
## 657    0.7  0.6   0.7  89.68090  71.17128
## 658    0.8  0.6   0.7  93.59416  73.18446
## 659    0.9  0.6   0.7 103.79096  76.09403
## 660    1.0  0.6   0.7 127.11477  87.28601
## 661    0.1  0.7   0.7 121.11629  89.01684
## 662    0.2  0.7   0.7 184.95457 124.82494
## 663    0.3  0.7   0.7 106.68837  84.92829
## 664    0.4  0.7   0.7  94.29455  72.09611
## 665    0.5  0.7   0.7  95.09593  74.91901
## 666    0.6  0.7   0.7  92.87484  74.85068
## 667    0.7  0.7   0.7  92.14400  73.24684
## 668    0.8  0.7   0.7  96.65877  75.39435
## 669    0.9  0.7   0.7 108.19147  78.90528
## 670    1.0  0.7   0.7 133.99458  92.24392
## 671    0.1  0.8   0.7 147.99818 106.86857
## 672    0.2  0.8   0.7 202.90074 140.71250
## 673    0.3  0.8   0.7 104.24678  83.22273
## 674    0.4  0.8   0.7  99.83382  76.43459
## 675    0.5  0.8   0.7  99.88454  79.39144
## 676    0.6  0.8   0.7  95.82636  77.31997
## 677    0.7  0.8   0.7  94.56009  75.06382
## 678    0.8  0.8   0.7  99.95974  77.63337
## 679    0.9  0.8   0.7 112.99227  81.96678
## 680    1.0  0.8   0.7 141.46670  97.50899
## 681    0.1  0.9   0.7 185.22278 130.15462
## 682    0.2  0.9   0.7 211.60725 148.53822
## 683    0.3  0.9   0.7 104.00404  80.74638
## 684    0.4  0.9   0.7 106.59126  81.71572
## 685    0.5  0.9   0.7 104.86884  83.84813
## 686    0.6  0.9   0.7  98.59661  80.10234
## 687    0.7  0.9   0.7  97.04705  76.57248
## 688    0.8  0.9   0.7 103.60883  80.12719
## 689    0.9  0.9   0.7 118.27350  84.81930
## 690    1.0  0.9   0.7 149.62744 103.18043
## 691    0.1  1.0   0.7 229.31535 153.23449
## 692    0.2  1.0   0.7 217.42330 153.13065
## 693    0.3  1.0   0.7 108.07717  83.05036
## 694    0.4  1.0   0.7 114.24724  87.94125
## 695    0.5  1.0   0.7 109.93950  88.44826
## 696    0.6  1.0   0.7 101.20463  82.28192
## 697    0.7  1.0   0.7  99.77948  77.58322
## 698    0.8  1.0   0.7 107.70925  83.13608
## 699    0.9  1.0   0.7 124.11508  87.79551
## 700    1.0  1.0   0.7 158.59754 109.69688
## 701    0.1  0.1   0.8  79.18393  59.31523
## 702    0.2  0.1   0.8  80.84989  60.24362
## 703    0.3  0.1   0.8  80.45743  60.76620
## 704    0.4  0.1   0.8  78.01487  59.70185
## 705    0.5  0.1   0.8  76.10905  58.78311
## 706    0.6  0.1   0.8  75.53162  59.24494
## 707    0.7  0.1   0.8  76.24130  60.13371
## 708    0.8  0.1   0.8  78.68015  61.42662
## 709    0.9  0.1   0.8  84.44421  65.05261
## 710    1.0  0.1   0.8  99.00214  71.29286
## 711    0.1  0.2   0.8  81.95584  60.79854
## 712    0.2  0.2   0.8  88.30535  65.28940
## 713    0.3  0.2   0.8  88.29476  66.38542
## 714    0.4  0.2   0.8  83.02525  63.03531
## 715    0.5  0.2   0.8  79.56926  61.48238
## 716    0.6  0.2   0.8  78.56338  61.76533
## 717    0.7  0.2   0.8  79.12675  62.55590
## 718    0.8  0.2   0.8  81.62656  63.54673
## 719    0.9  0.2   0.8  87.88444  67.24454
## 720    1.0  0.2   0.8 103.99346  74.15299
## 721    0.1  0.3   0.8  85.39647  61.44278
## 722    0.2  0.3   0.8 100.65562  74.73293
## 723    0.3  0.3   0.8  98.87275  73.50515
## 724    0.4  0.3   0.8  87.52844  66.62605
## 725    0.5  0.3   0.8  82.60878  63.97767
## 726    0.6  0.3   0.8  81.53143  64.34608
## 727    0.7  0.3   0.8  81.97892  64.94612
## 728    0.8  0.3   0.8  84.53905  65.82481
## 729    0.9  0.3   0.8  91.39048  69.22913
## 730    1.0  0.3   0.8 109.24642  77.03412
## 731    0.1  0.4   0.8  90.57136  65.59625
## 732    0.2  0.4   0.8 120.27089  87.77676
## 733    0.3  0.4   0.8 110.02236  81.34770
## 734    0.4  0.4   0.8  90.41887  69.22882
## 735    0.5  0.4   0.8  85.46511  66.04196
## 736    0.6  0.4   0.8  84.64283  66.86824
## 737    0.7  0.4   0.8  84.82534  67.29805
## 738    0.8  0.4   0.8  87.42623  68.29059
## 739    0.9  0.4   0.8  95.00475  71.16654
## 740    1.0  0.4   0.8 114.81036  80.10395
## 741    0.1  0.5   0.8  98.99044  72.09820
## 742    0.2  0.5   0.8 148.52302 105.31293
## 743    0.3  0.5   0.8 117.61822  88.69432
## 744    0.4  0.5   0.8  92.12138  70.93962
## 745    0.5  0.5   0.8  88.88738  68.97042
## 746    0.6  0.5   0.8  88.04131  70.02095
## 747    0.7  0.5   0.8  87.64320  69.72609
## 748    0.8  0.5   0.8  90.31136  70.79507
## 749    0.9  0.5   0.8  98.78984  73.48861
## 750    1.0  0.5   0.8 120.74454  83.49360
## 751    0.1  0.6   0.8 113.06484  82.80861
## 752    0.2  0.6   0.8 184.68951 125.09591
## 753    0.3  0.6   0.8 120.76694  92.50368
## 754    0.4  0.6   0.8  94.28496  72.48911
## 755    0.5  0.6   0.8  93.31132  72.96151
## 756    0.6  0.6   0.8  91.64746  73.54194
## 757    0.7  0.6   0.8  90.38079  71.93317
## 758    0.8  0.6   0.8  93.24308  73.04389
## 759    0.9  0.6   0.8 102.82048  75.98202
## 760    1.0  0.6   0.8 127.11477  87.28601
## 761    0.1  0.7   0.8 136.80680  99.78465
## 762    0.2  0.7   0.8 221.75758 145.17819
## 763    0.3  0.7   0.8 120.79002  94.92712
## 764    0.4  0.7   0.8  98.58011  75.14210
## 765    0.5  0.7   0.8  98.61737  77.87704
## 766    0.6  0.7   0.8  95.30300  76.72620
## 767    0.7  0.7   0.8  93.02394  73.70414
## 768    0.8  0.7   0.8  96.30701  75.19189
## 769    0.9  0.7   0.8 107.17988  78.89932
## 770    1.0  0.7   0.8 133.99458  92.24392
## 771    0.1  0.8   0.8 174.37029 123.68627
## 772    0.2  0.8   0.8 248.33442 164.08365
## 773    0.3  0.8   0.8 118.02490  93.88413
## 774    0.4  0.8   0.8 105.22521  80.17068
## 775    0.5  0.8   0.8 104.52734  82.86631
## 776    0.6  0.8   0.8  98.89766  79.86407
## 777    0.7  0.8   0.8  95.62487  75.62320
## 778    0.8  0.8   0.8  99.62010  77.46227
## 779    0.9  0.8   0.8 111.95443  81.99878
## 780    1.0  0.8   0.8 141.46670  97.50899
## 781    0.1  0.9   0.8 225.40073 153.69188
## 782    0.2  0.9   0.8 262.09222 176.30453
## 783    0.3  0.9   0.8 116.40594  90.54694
## 784    0.4  0.9   0.8 113.56312  86.10239
## 785    0.5  0.9   0.8 110.83249  88.34570
## 786    0.6  0.9   0.8 102.39746  83.13071
## 787    0.7  0.9   0.8  98.30813  77.02238
## 788    0.8  0.9   0.8 103.31174  79.90807
## 789    0.9  0.9   0.8 117.23012  85.05118
## 790    1.0  0.9   0.8 149.62744 103.18043
## 791    0.1  1.0   0.8 283.04983 183.37157
## 792    0.2  1.0   0.8 272.73301 181.75288
## 793    0.3  1.0   0.8 120.15430  92.33256
## 794    0.4  1.0   0.8 123.17684  93.62273
## 795    0.5  1.0   0.8 117.39375  93.93323
## 796    0.6  1.0   0.8 105.83603  85.85764
## 797    0.7  1.0   0.8 101.26864  77.95782
## 798    0.8  1.0   0.8 107.50472  83.22956
## 799    0.9  1.0   0.8 123.09311  88.49843
## 800    1.0  1.0   0.8 158.59754 109.69688
## 801    0.1  0.1   0.9  81.11408  60.41985
## 802    0.2  0.1   0.9  83.75934  62.70306
## 803    0.3  0.1   0.9  83.35543  63.30877
## 804    0.4  0.1   0.9  80.17105  61.28917
## 805    0.5  0.1   0.9  77.42939  59.87986
## 806    0.6  0.1   0.9  76.23171  59.61135
## 807    0.7  0.1   0.9  76.44843  60.33783
## 808    0.8  0.1   0.9  78.43465  61.31163
## 809    0.9  0.1   0.9  83.82764  64.72377
## 810    1.0  0.1   0.9  99.00214  71.29286
## 811    0.1  0.2   0.9  84.30038  62.06862
## 812    0.2  0.2   0.9  92.86959  69.09924
## 813    0.3  0.2   0.9  92.93369  70.10424
## 814    0.4  0.2   0.9  86.08974  65.23884
## 815    0.5  0.2   0.9  81.23958  62.61405
## 816    0.6  0.2   0.9  79.44346  62.13444
## 817    0.7  0.2   0.9  79.43433  62.87177
## 818    0.8  0.2   0.9  81.40552  63.50685
## 819    0.9  0.2   0.9  87.23249  66.89933
## 820    1.0  0.2   0.9 103.99346  74.15299
## 821    0.1  0.3   0.9  88.47943  63.81310
## 822    0.2  0.3   0.9 108.40630  80.49182
## 823    0.3  0.3   0.9 106.41026  78.79089
## 824    0.4  0.3   0.9  91.50356  69.16905
## 825    0.5  0.3   0.9  84.54980  65.03483
## 826    0.6  0.3   0.9  82.60212  64.77086
## 827    0.7  0.3   0.9  82.41075  65.37059
## 828    0.8  0.3   0.9  84.34585  65.78969
## 829    0.9  0.3   0.9  90.69941  68.88451
## 830    1.0  0.3   0.9 109.24642  77.03412
## 831    0.1  0.4   0.9  94.97485  69.08892
## 832    0.2  0.4   0.9 133.24806  96.44059
## 833    0.3  0.4   0.9 121.23241  88.25355
## 834    0.4  0.4   0.9  95.00471  72.36375
## 835    0.5  0.4   0.9  87.63604  67.61150
## 836    0.6  0.4   0.9  85.98467  67.55642
## 837    0.7  0.4   0.9  85.42422  67.95365
## 838    0.8  0.4   0.9  87.26326  68.30029
## 839    0.9  0.4   0.9  94.27161  70.95554
## 840    1.0  0.4   0.9 114.81036  80.10395
## 841    0.1  0.5   0.9 105.73419  77.27802
## 842    0.2  0.5   0.9 168.89324 117.46896
## 843    0.3  0.5   0.9 131.83224  97.63958
## 844    0.4  0.5   0.9  97.03505  74.10392
## 845    0.5  0.5   0.9  91.43394  71.05970
## 846    0.6  0.5   0.9  89.79881  71.16993
## 847    0.7  0.5   0.9  88.45211  70.65730
## 848    0.8  0.5   0.9  90.17688  70.72063
## 849    0.9  0.5   0.9  98.01399  73.40138
## 850    1.0  0.5   0.9 120.74454  83.49360
## 851    0.1  0.6   0.9 124.32234  90.49845
## 852    0.2  0.6   0.9 215.31291 142.23953
## 853    0.3  0.6   0.9 137.15762 102.32506
## 854    0.4  0.6   0.9  99.39230  76.43523
## 855    0.5  0.6   0.9  96.55106  75.64021
## 856    0.6  0.6   0.9  93.97342  75.26065
## 857    0.7  0.6   0.9  91.43076  72.93545
## 858    0.8  0.6   0.9  93.13423  72.93591
## 859    0.9  0.6   0.9 102.00544  75.87534
## 860    1.0  0.6   0.9 127.11477  87.28601
## 861    0.1  0.7   0.9 156.87514 112.04531
## 862    0.2  0.7   0.9 264.97581 167.66617
## 863    0.3  0.7   0.9 139.37484 106.39084
## 864    0.4  0.7   0.9 104.29662  79.84972
## 865    0.5  0.7   0.9 102.88340  81.16674
## 866    0.6  0.7   0.9  98.32932  79.09416
## 867    0.7  0.7   0.9  94.33865  74.69244
## 868    0.8  0.7   0.9  96.22692  74.99689
## 869    0.9  0.7   0.9 106.33466  78.84559
## 870    1.0  0.7   0.9 133.99458  92.24392
## 871    0.1  0.8   0.9 208.72786 143.08773
## 872    0.2  0.8   0.9 302.78193 191.57309
## 873    0.3  0.8   0.9 137.25975 107.24852
## 874    0.4  0.8   0.9 112.28234  85.50011
## 875    0.5  0.8   0.9 110.09326  86.65658
## 876    0.6  0.8   0.9 102.74338  82.92660
## 877    0.7  0.8   0.9  97.22923  76.56415
## 878    0.8  0.8   0.9  99.58592  77.22794
## 879    0.9  0.8   0.9 111.09410  81.99672
## 880    1.0  0.8   0.9 141.46670  97.50899
## 881    0.1  0.9   0.9 277.40951 180.90533
## 882    0.2  0.9   0.9 322.92566 206.65652
## 883    0.3  0.9   0.9 134.30188 102.92446
## 884    0.4  0.9   0.9 122.50189  92.41378
## 885    0.5  0.9   0.9 117.89889  92.92288
## 886    0.6  0.9   0.9 107.19345  86.38755
## 887    0.7  0.9   0.9 100.23348  78.04133
## 888    0.8  0.9   0.9 103.36182  79.75893
## 889    0.9  0.9   0.9 116.37613  85.36824
## 890    1.0  0.9   0.9 149.62744 103.18043
## 891    0.1  1.0   0.9 351.12983 219.41280
## 892    0.2  1.0   0.9 338.01532 214.74824
## 893    0.3  1.0   0.9 137.40228 103.78369
## 894    0.4  1.0   0.9 134.38149  99.91238
## 895    0.5  1.0   0.9 126.10226  99.75533
## 896    0.6  1.0   0.9 111.74683  90.00870
## 897    0.7  1.0   0.9 103.56243  79.33469
## 898    0.8  1.0   0.9 107.70307  83.20741
## 899    0.9  1.0   0.9 122.27393  89.07869
## 900    1.0  1.0   0.9 158.59754 109.69688
## 901    0.1  0.1   1.0  83.39472  62.06236
## 902    0.2  0.1   1.0  87.14639  65.21546
## 903    0.3  0.1   1.0  86.70592  66.21568
## 904    0.4  0.1   1.0  82.70730  63.26452
## 905    0.5  0.1   1.0  79.00718  61.14380
## 906    0.6  0.1   1.0  77.10708  60.47185
## 907    0.7  0.1   1.0  76.78782  60.53040
## 908    0.8  0.1   1.0  78.30436  61.40483
## 909    0.9  0.1   1.0  83.29941  64.40586
## 910    1.0  0.1   1.0  99.00214  71.29286
## 911    0.1  0.2   1.0  87.05598  63.66100
## 912    0.2  0.2   1.0  98.26346  73.35637
## 913    0.3  0.2   1.0  98.34621  74.21505
## 914    0.4  0.2   1.0  89.74695  68.09168
## 915    0.5  0.2   1.0  83.24419  64.10274
## 916    0.6  0.2   1.0  80.53228  62.85285
## 917    0.7  0.2   1.0  79.89396  63.19041
## 918    0.8  0.2   1.0  81.31227  63.57181
## 919    0.9  0.2   1.0  86.67722  66.58508
## 920    1.0  0.2   1.0 103.99346  74.15299
## 921    0.1  0.3   1.0  92.09870  66.81757
## 922    0.2  0.3   1.0 117.72033  87.39855
## 923    0.3  0.3   1.0 115.34533  84.37369
## 924    0.4  0.3   1.0  96.32970  72.32560
## 925    0.5  0.3   1.0  86.90251  66.73251
## 926    0.6  0.3   1.0  83.91732  65.46667
## 927    0.7  0.3   1.0  83.02052  65.76830
## 928    0.8  0.3   1.0  84.29619  65.98819
## 929    0.9  0.3   1.0  90.11392  68.68445
## 930    1.0  0.3   1.0 109.24642  77.03412
## 931    0.1  0.4   1.0 100.10876  73.37926
## 932    0.2  0.4   1.0 148.73772 106.25553
## 933    0.3  0.4   1.0 134.78013  96.18274
## 934    0.4  0.4   1.0 100.68036  76.11089
## 935    0.5  0.4   1.0  90.29065  69.45769
## 936    0.6  0.4   1.0  87.61730  68.55711
## 937    0.7  0.4   1.0  86.23868  68.57660
## 938    0.8  0.4   1.0  87.26367  68.47998
## 939    0.9  0.4   1.0  93.65329  70.90407
## 940    1.0  0.4   1.0 114.81036  80.10395
## 941    0.1  0.5   1.0 113.61403  83.13642
## 942    0.2  0.5   1.0 192.46175 130.79694
## 943    0.3  0.5   1.0 149.20834 108.91650
## 944    0.4  0.5   1.0 103.31663  78.70690
## 945    0.5  0.5   1.0  94.53973  73.37883
## 946    0.6  0.5   1.0  91.91880  72.61937
## 947    0.7  0.5   1.0  89.53047  71.55482
## 948    0.8  0.5   1.0  90.22978  70.77808
## 949    0.9  0.5   1.0  97.36221  73.37693
## 950    1.0  0.5   1.0 120.74454  83.49360
## 951    0.1  0.6   1.0 138.08532  99.12961
## 952    0.2  0.6   1.0 249.92522 160.30822
## 953    0.3  0.6   1.0 157.17268 113.77369
## 954    0.4  0.6   1.0 106.08800  81.65717
## 955    0.5  0.6   1.0 100.46110  78.88932
## 956    0.6  0.6   1.0  96.76795  77.35528
## 957    0.7  0.6   1.0  92.82282  74.00282
## 958    0.8  0.6   1.0  93.24080  72.83202
## 959    0.9  0.6   1.0 101.32357  75.85062
## 960    1.0  0.6   1.0 127.11477  87.28601
## 961    0.1  0.7   1.0 182.50967 126.55417
## 962    0.2  0.7   1.0 314.09071 191.96917
## 963    0.3  0.7   1.0 162.48974 119.49158
## 964    0.4  0.7   1.0 111.77790  85.97143
## 965    0.5  0.7   1.0 107.96493  85.33392
## 966    0.6  0.7   1.0 101.96563  81.82370
## 967    0.7  0.7   1.0  96.08998  76.20328
## 968    0.8  0.7   1.0  96.39465  74.95682
## 969    0.9  0.7   1.0 105.63165  78.78874
## 970    1.0  0.7   1.0 133.99458  92.24392
## 971    0.1  0.8   1.0 253.01227 166.23238
## 972    0.2  0.8   1.0 366.18278 221.49783
## 973    0.3  0.8   1.0 162.29516 122.93290
## 974    0.4  0.8   1.0 121.40996  92.48950
## 975    0.5  0.8   1.0 116.62107  91.50023
## 976    0.6  0.8   1.0 107.37546  86.62665
## 977    0.7  0.8   1.0  99.39133  78.17858
## 978    0.8  0.8   1.0  99.83784  77.10645
## 979    0.9  0.8   1.0 110.38528  82.04446
## 980    1.0  0.8   1.0 141.46670  97.50899
## 981    0.1  0.9   1.0 343.71746 214.79112
## 982    0.2  0.9   1.0 394.93769 242.79202
## 983    0.3  0.9   1.0 158.41458 117.53224
## 984    0.4  0.9   1.0 133.89514 100.15998
## 985    0.5  0.9   1.0 126.02405  97.60984
## 986    0.6  0.9   1.0 112.99437  90.49168
## 987    0.7  0.9   1.0 102.86677  79.88984
## 988    0.8  0.9   1.0 103.74625  79.67944
## 989    0.9  0.9   1.0 115.68376  85.70336
## 990    1.0  0.9   1.0 149.62744 103.18043
## 991    0.1  1.0   1.0 436.05927 261.47826
## 992    0.2  1.0   1.0 414.03305 251.50251
## 993    0.3  1.0   1.0 160.60356 118.11119
## 994    0.4  1.0   1.0 148.41411 108.44885
## 995    0.5  1.0   1.0 135.87138 105.77260
## 996    0.6  1.0   1.0 118.94370  94.80452
## 997    0.7  1.0   1.0 106.73906  81.38865
## 998    0.8  1.0   1.0 108.30133  83.27631
## 999    0.9  1.0   1.0 121.62817  89.52571
## 1000   1.0  1.0   1.0 158.59754 109.69688

VERIFICATION OF THE ADDITIVE HOLT-WINTERS EXPONENTIAL SMOOTHING MODEL

The best model is fit102 (alpha = 0.2, beta = 0.1, and gamma = 0.2), because it has the smallest RMSE and MAD of all the combinations tried, with RMSE = 72.04 and MAD = 52.33. The additive HWES model with (alpha = 0.2, beta = 0.1, gamma = 0.2) is therefore taken as the best model.
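
As a side note (an addition, not part of the original selection), HoltWinters() can also estimate the smoothing parameters itself by minimising the in-sample squared prediction error when alpha, beta, and gamma are omitted; this gives a rough cross-check on the grid search above (the name fit_auto is illustrative).

#Cross-check: let HoltWinters() optimise alpha, beta, and gamma by minimising SSE
fit_auto <- HoltWinters(datats, seasonal = "additive")
c(alpha = fit_auto$alpha, beta = fit_auto$beta, gamma = fit_auto$gamma)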

#Best additive Holt-Winters exponential smoothing model
fit102 <- HoltWinters(datats, alpha = 0.2, beta = 0.1, gamma = 0.2, seasonal = "additive")

fit102
## Holt-Winters exponential smoothing with trend and additive seasonal component.
## 
## Call:
## HoltWinters(x = datats, alpha = 0.2, beta = 0.1, gamma = 0.2,     seasonal = "additive")
## 
## Smoothing parameters:
##  alpha: 0.2
##  beta : 0.1
##  gamma: 0.2
## 
## Coefficients:
##            [,1]
## a   171.1587263
## b     0.0333926
## s1   44.6877354
## s2  -21.7674022
## s3  -21.4373447
## s4  -29.8114214
## s5   18.6258431
## s6   18.7344995
## s7   24.4047886
## s8  -18.8932133
## s9  -23.2355604
## s10 -18.5673080
## s11 -26.5944149
## s12  42.9890974
fitted(fit102)[,1]
##            Jan       Feb       Mar       Apr       May       Jun       Jul
## 2015 215.35836 141.24234 146.97767 153.33181 197.80336 156.45841 274.38903
## 2016 238.60656 168.23060 171.75169 175.90569 214.96289 175.73354 292.06456
## 2017 254.23764 183.26893 184.15939 187.37958 242.02302 180.89902 364.42578
## 2018 271.77448 178.79008 182.13304 191.51221 235.32295 201.17404 368.68957
## 2019 293.93951 195.58256 197.42397 207.02620 237.16728 229.46173 343.53019
## 2020 297.12834 202.46366 202.58298 197.61860 157.32713 184.40338 162.47906
## 2021 119.09582  29.39964  31.39754  33.39528  55.06929 150.09137 190.12765
## 2022 138.83859  78.07225  98.40671  98.97087 120.06208 208.36935 226.01783
## 2023 221.59029 146.30381 151.26797 125.17543 216.38806 221.95193 225.09053
##            Aug       Sep       Oct       Nov       Dec
## 2015 273.35906 141.51643 164.59791 165.88494 227.84446
## 2016 267.26289 147.65723 168.79846 169.13870 236.66109
## 2017 285.12125 186.30637 202.45905 191.92662 254.88914
## 2018 269.31875 197.57264 205.30335 195.48433 275.96983
## 2019 252.50292 195.62704 193.46172 191.90150 275.56337
## 2020  41.62608  24.81364  31.38182  40.91982 140.73540
## 2021 107.65366  46.94275  46.19046  54.45316 136.87291
## 2022 195.08384 171.83803 177.55194 165.54376 222.26422
## 2023 183.48852 157.41479 161.95269 151.61530 211.70648
#Extract the residuals from the model
residual <- residuals(fit102)
residual
##                Jan           Feb           Mar           Apr           May
## 2015    9.84599388    7.54946552    3.25880546    2.66178326    0.71867512
## 2016   28.50577396   -1.10191105   -7.22813343  -20.50299219   13.52343220
## 2017   48.93227641   -8.87282663   -1.38083161   25.50302137  -38.11127641
## 2018   -3.90324121    6.82511481   15.63450177   29.55338966  -42.05112273
## 2019  -40.60376359   -7.54799324    6.69119764   20.47325545 -109.95092801
## 2020  -50.22552039  -27.22097508  -69.24506472 -197.61860379 -157.32712601
## 2021   -6.01620457   46.21781652   83.42406164   68.52135345  160.79641206
## 2022   60.07344935   69.07732916   52.23856509  -33.90936169  219.68782713
## 2023  -26.43138159    5.36688575   -6.57519271  105.43680310   -7.44309029
##                Jun           Jul           Aug           Sep           Oct
## 2015   -0.72466537   69.64730519  -88.58622293   15.74449612   -4.64479530
## 2016  -58.20056895   82.87276466 -103.84318953   28.13810403    7.10208348
## 2017  145.05343312  -47.13411317 -111.56722180   15.22278148  -36.29347082
## 2018  219.06279534  -91.19722337  -76.44573786   15.50715640  -23.14956564
## 2019  209.74209329  -55.17624440  -70.17902864    3.61903410    6.57577210
## 2020 -184.40337788 -112.66941971  130.64985948   95.30297151  101.62193683
## 2021    5.73224369 -171.79734969  -94.42490148   26.65073062   83.06123258
## 2022    4.36757815  -22.38580812  -54.35715084  -30.79903172  -41.60341541
## 2023  -29.10224333  -31.55499954  -67.44111786  -10.19386494  -17.66824565
##                Nov           Dec
## 2015  -19.48480255    4.13183053
## 2016   -7.18500767   20.45172476
## 2017  -48.43097885   -0.07352327
## 2018   -4.07116468    3.48678242
## 2019    8.22845790   54.13497554
## 2020   85.42890082  -36.20788871
## 2021   71.96677331   27.50166255
## 2022  -41.76977473  -27.10531061
## 2023  -23.06958827    6.78150341
#Forecast with the fitted additive HWES model and plot it
df <- forecast(fit102)
plot(df)

#Plot comparing the fitted model with the actual (transformed) data
fitted_modela <- (fitted(fit102)[,1])
#Plot the actual data
plot(datats, type = "l", col = "blue", 
     xlab = "Tahun", ylab = "Jumlah Pengunjung", 
     main = "Fitted vs True (Setelah Transformasi Data) Model HWES Aditif")

#Add the fitted values from the model
lines(fitted_modela, type = "l", col = "red")

#Add a legend (blue = actual series, red = fitted values)
legend("topright", legend = c("Aktual", "Prediksi"), 
       col = c("blue", "red"), lty = 1)

#In-sample errors: actual (transformed) data minus fitted values
se <- datats - fitted_modela
se
##                Jan           Feb           Mar           Apr           May
## 2015    9.84599388    7.54946552    3.25880546    2.66178326    0.71867512
## 2016   28.50577396   -1.10191105   -7.22813343  -20.50299219   13.52343220
## 2017   48.93227641   -8.87282663   -1.38083161   25.50302137  -38.11127641
## 2018   -3.90324121    6.82511481   15.63450177   29.55338966  -42.05112273
## 2019  -40.60376359   -7.54799324    6.69119764   20.47325545 -109.95092801
## 2020  -50.22552039  -27.22097508  -69.24506472 -197.61860379 -157.32712601
## 2021   -6.01620457   46.21781652   83.42406164   68.52135345  160.79641206
## 2022   60.07344935   69.07732916   52.23856509  -33.90936169  219.68782713
## 2023  -26.43138159    5.36688575   -6.57519271  105.43680310   -7.44309029
##                Jun           Jul           Aug           Sep           Oct
## 2015   -0.72466537   69.64730519  -88.58622293   15.74449612   -4.64479530
## 2016  -58.20056895   82.87276466 -103.84318953   28.13810403    7.10208348
## 2017  145.05343312  -47.13411317 -111.56722180   15.22278148  -36.29347082
## 2018  219.06279534  -91.19722337  -76.44573786   15.50715640  -23.14956564
## 2019  209.74209329  -55.17624440  -70.17902864    3.61903410    6.57577210
## 2020 -184.40337788 -112.66941971  130.64985948   95.30297151  101.62193683
## 2021    5.73224369 -171.79734969  -94.42490148   26.65073062   83.06123258
## 2022    4.36757815  -22.38580812  -54.35715084  -30.79903172  -41.60341541
## 2023  -29.10224333  -31.55499954  -67.44111786  -10.19386494  -17.66824565
##                Nov           Dec
## 2015  -19.48480255    4.13183053
## 2016   -7.18500767   20.45172476
## 2017  -48.43097885   -0.07352327
## 2018   -4.07116468    3.48678242
## 2019    8.22845790   54.13497554
## 2020   85.42890082  -36.20788871
## 2021   71.96677331   27.50166255
## 2022  -41.76977473  -27.10531061
## 2023  -23.06958827    6.78150341
#In-sample RMSE of the additive HWES model
se_t <- sqrt(mean((se)^2))
se_t
## [1] 72.04302

FORECASTING WITH THE ADDITIVE HOLT-WINTERS EXPONENTIAL SMOOTHING MODEL

#Forecast for the next one-year period (2024)
a <- predict(fit102, n.ahead = 12)
a
##           Jan      Feb      Mar      Apr      May      Jun      Jul      Aug
## 2024 215.8799 149.4581 149.8216 141.4809 189.9515 190.0936 195.7973 152.5327
##           Sep      Oct      Nov      Dec
## 2024 148.2237 152.9253 144.9316 214.5485
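
The predict() call above returns point forecasts only. As an alternative sketch (using the forecast package that is already loaded; fc102 is an illustrative name), forecast() also produces 80% and 95% prediction intervals:

#12-month forecast with prediction intervals
fc102 <- forecast(fit102, h = 12)
fc102
plot(fc102)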

ARIMA FORECASTING MODEL

TIME SERIES MODEL IDENTIFICATION

#Time series plot
plot.ts(datats, xlab = "Tahun", ylab = "Jumlah Pengunjung")
#Add axis ticks for the years
axis(1, at = seq(2014, 2023, by = 1), labels = seq(2014, 2023, by = 1))

#ACF plot
#Display the ACF correlation plot
Acf(datats)

#Display the ACF correlation values
print(Acf(datats))

## 
## Autocorrelations of series 'datats', by lag
## 
##      0      1      2      3      4      5      6      7      8      9     10 
##  1.000  0.396  0.237  0.140  0.066  0.238  0.426  0.244  0.121  0.085 -0.033 
##     11     12     13     14     15     16     17     18     19     20     21 
##  0.149  0.412  0.115  0.120  0.024  0.010  0.151  0.219 -0.004 -0.125 -0.088 
##     22     23     24 
## -0.193  0.090  0.054

CHECKING STATIONARITY IN VARIANCE

Stationarity in variance can be checked with a Box-Cox analysis: the estimated lambda should be close to 1. If lambda is close to 1, the data are stationary in variance; otherwise a Box-Cox transformation would be needed first.

BoxCox.lambda(datats)
## Warning in guerrero(x, lower, upper): Guerrero's method for selecting a Box-Cox
## parameter (lambda) is given for strictly positive data.
## [1] 0.9597101

Since the estimated lambda is about 0.96, which is close to 1, the data can be regarded as stationary in variance.
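
No transformation is needed here because lambda is close to 1, but as a sketch of what would otherwise be done (the name datats_bc is illustrative), the series could be stabilised with a Box-Cox transformation and the forecasts back-transformed afterwards:

#Variance-stabilising transformation, only if lambda were far from 1
lambda <- BoxCox.lambda(datats)
datats_bc <- BoxCox(datats, lambda)
#...fit the model to datats_bc, then back-transform forecasts with InvBoxCox(x, lambda)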

CHECKING STATIONARITY IN MEAN

Stationarity in mean can be checked with the ADF (Augmented Dickey-Fuller) test, with:

H0 : The data are not stationary

H1 : The data are stationary

Decision rule: H0 is accepted if p-value > alpha (5% / 0.05)

adf.test(datats)
## 
##  Augmented Dickey-Fuller Test
## 
## data:  datats
## Dickey-Fuller = -3.4139, Lag order = 4, p-value = 0.05559
## alternative hypothesis: stationary

From the output above, the p-value is greater than alpha (0.0556 > 0.05), so H0 is accepted: the data are not stationary in mean.

DIFFERENCING

  1. Differencing is applied because the earlier time series plot shows that the data are not yet stationary in mean: the series does not fluctuate around a constant level and still contains upward and downward trends.

  2. Differencing is also supported by the ADF test for stationarity in mean, which gave a p-value greater than alpha (0.0556 > 0.05), meaning the data are not yet stationary in mean. (A quick cross-check with ndiffs() is sketched below.)
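
As a cross-check on the required order of regular differencing (an addition to the original workflow), the forecast package can estimate it directly:

#Estimated number of differences needed for stationarity in mean
ndiffs(datats)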

#First-order differencing (d = 1)
datadiff1 <- diff(datats, differences = 1)
datadiff1
##                Jan           Feb           Mar           Apr           May
## 2014                -83.14313082   -2.05375448    1.92328636   21.29372288
## 2015    9.99515110  -76.41255080    1.44467941    5.75710935   42.52844947
## 2016   35.13604403  -99.98364299   -2.60513838   -9.12085187   73.08362076
## 2017   46.05710365 -128.77381848    8.38245367   30.10404221   -8.97085114
## 2018   13.05561811  -82.25604438   12.15234697   23.29806081  -27.79377183
## 2019  -26.12086999  -65.30117431   16.08059228   23.38428698 -100.28309940
## 2020  -82.79553209  -71.66012603  -41.90477228 -133.33791659    0.00000000
## 2021    8.55210963  -37.46215967   39.20414245  -12.90496816  113.94906639
## 2022   34.53746862  -51.76245710    3.49569496  -85.58376936  274.68839866
## 2023    0.00000000  -43.48821369   -6.97791794   85.91945066  -21.66725981
##                Jun           Jul           Aug           Sep           Oct
## 2014   13.81359592   88.73443249  -17.27432091 -114.24233113   19.80624861
## 2015  -42.78829193  188.30258814 -159.26350179  -27.51190383    2.69218846
## 2016 -110.95334935  257.40435400 -211.51762304   12.37563049    0.10520453
## 2017  122.04070490   -8.66078659 -143.73763776   27.97512805  -35.36357415
## 2018  226.96500869 -142.74449575  -84.61932702   20.20678204  -30.92601607
## 2019  311.98747397 -150.84987595 -106.03006154   16.92219138    0.79141748
## 2020    0.00000000   49.80963762  122.46630382  -52.15933143   12.88714934
## 2021  -60.04208075 -137.49331546   -5.10154622   60.36472142   55.65821447
## 2022 -127.01298128   -9.10490569  -62.90533560    0.31231624   -5.09048203
## 2023  -16.09528644    0.68584406  -77.48812235   31.17351832   -2.93648113
##                Nov           Dec
## 2014    3.22707155   65.78363379
## 2015  -13.55298152   85.57615528
## 2016  -13.94684299   95.15911862
## 2017  -22.66993554  111.31997508
## 2018    9.25938451   88.04344990
## 2019    0.09246129  129.56838920
## 2020   -6.65503557  -21.82121544
## 2021   -2.83175731   37.95463711
## 2022  -12.17453209   71.38492204
## 2023  -15.73872958   89.94227423
#Time series plot after one round of differencing
plot.ts(datadiff1, xlab = "Tahun", ylab = "Jumlah Pengunjung")
#Add axis ticks for the years
axis(1, at = seq(2014, 2023, by = 1), labels = seq(2014, 2023, by = 1))

CHECKING STATIONARITY IN MEAN AFTER DIFFERENCING

Stationarity in mean can again be checked with the ADF (Augmented Dickey-Fuller) test, with:

H0 : The data are not stationary

H1 : The data are stationary

Decision rule: H0 is accepted if p-value > alpha (5% / 0.05)

adf.test(datadiff1)
## Warning in adf.test(datadiff1): p-value smaller than printed p-value
## 
##  Augmented Dickey-Fuller Test
## 
## data:  datadiff1
## Dickey-Fuller = -10.619, Lag order = 4, p-value = 0.01
## alternative hypothesis: stationary

From the output above, the p-value is smaller than alpha (0.01 < 0.05), so H0 is rejected: the differenced data are stationary in mean.

ACF PLOT AND CORRELATIONS AFTER DIFFERENCING

Nine lags are significant (they exceed the approximate 95% significance bound), so the MA order considered is q = 9; a quick check of this count is sketched after the ACF output below.

#Display the ACF correlation plot
Acf(datadiff1)

#Display the ACF correlation values
print(Acf(datadiff1))

## 
## Autocorrelations of series 'datadiff1', by lag
## 
##      0      1      2      3      4      5      6      7      8      9     10 
##  1.000 -0.362 -0.052 -0.020 -0.204 -0.017  0.303 -0.049 -0.068  0.071 -0.250 
##     11     12     13     14     15     16     17     18     19     20     21 
## -0.072  0.465 -0.244  0.082 -0.068 -0.131  0.060  0.232 -0.083 -0.118  0.114 
##     22     23     24 
## -0.321  0.258  0.133
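
A rough way to count the significant lags referred to above (a sketch added here, not part of the original analysis) is to compare the sample autocorrelations with the approximate 95% bound of 1.96/sqrt(n); the same check can be repeated for the PACF in the next subsection.

#Approximate 95% significance bound for the sample autocorrelations
n <- length(datadiff1)
bound <- 1.96 / sqrt(n)
acf_vals <- Acf(datadiff1, plot = FALSE)$acf[-1]   #drop lag 0
which(abs(acf_vals) > bound)                       #lags exceeding the bound
sum(abs(acf_vals) > bound)                         #number of significant lags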

PACF PLOT AND CORRELATIONS AFTER DIFFERENCING

Seven lags are significant, so the AR order considered is p = 7.

#Display the PACF correlation plot
Pacf(datadiff1)

#Display the PACF correlation values
print(Pacf(datadiff1))

## 
## Partial autocorrelations of series 'datadiff1', by lag
## 
##      1      2      3      4      5      6      7      8      9     10     11 
## -0.362 -0.210 -0.143 -0.343 -0.375  0.036  0.053 -0.092  0.033 -0.138 -0.331 
##     12     13     14     15     16     17     18     19     20     21     22 
##  0.236 -0.018 -0.049 -0.135 -0.016  0.070  0.144  0.139 -0.158  0.014 -0.122 
##     23     24 
##  0.199  0.044

ARIMA MODELLING

#From the ACF and PACF identification, the following candidate ARIMA models are fitted
fit1 = Arima(datats, order = c(1, 1, 1))
fit2 = Arima(datats, order = c(1, 1, 2))
fit3 = Arima(datats, order = c(1, 1, 3))
fit4 = Arima(datats, order = c(1, 1, 4))
fit5 = Arima(datats, order = c(1, 1, 5))
fit6 = Arima(datats, order = c(1, 1, 6))
fit7 = Arima(datats, order = c(1, 1, 7))
fit8 = Arima(datats, order = c(1, 1, 8))
fit9 = Arima(datats, order = c(1, 1, 9))
fit10 = Arima(datats, order = c(2, 1, 1))
fit11 = Arima(datats, order = c(2, 1, 2))
fit12 = Arima(datats, order = c(2, 1, 3))
fit13 = Arima(datats, order = c(2, 1, 4))
fit14 = Arima(datats, order = c(2, 1, 5))
fit15 = Arima(datats, order = c(2, 1, 6))
fit16 = Arima(datats, order = c(2, 1, 7))
fit17 = Arima(datats, order = c(2, 1, 8))
fit18 = Arima(datats, order = c(2, 1, 9))
fit19 = Arima(datats, order = c(3, 1, 1))
fit20 = Arima(datats, order = c(3, 1, 2))
fit21 = Arima(datats, order = c(3, 1, 3))
fit22 = Arima(datats, order = c(3, 1, 4))
fit23 = Arima(datats, order = c(3, 1, 5))
fit24 = Arima(datats, order = c(3, 1, 6))
fit25 = Arima(datats, order = c(3, 1, 7))
fit26 = Arima(datats, order = c(3, 1, 8))
fit27 = Arima(datats, order = c(3, 1, 9))
fit28 = Arima(datats, order = c(4, 1, 1))
fit29 = Arima(datats, order = c(4, 1, 2))
fit30 = Arima(datats, order = c(4, 1, 3))
fit31 = Arima(datats, order = c(4, 1, 4))
fit32 = Arima(datats, order = c(4, 1, 5))
fit33 = Arima(datats, order = c(4, 1, 7))
fit34 = Arima(datats, order = c(4, 1, 8))
fit35 = Arima(datats, order = c(4, 1, 9))
fit36 = Arima(datats, order = c(5, 1, 1))
fit37 = Arima(datats, order = c(5, 1, 2))
fit38 = Arima(datats, order = c(5, 1, 3))
fit39 = Arima(datats, order = c(5, 1, 4))
fit40 = Arima(datats, order = c(5, 1, 5))
fit41 = Arima(datats, order = c(5, 1, 6))
fit42 = Arima(datats, order = c(5, 1, 8))
fit43 = Arima(datats, order = c(5, 1, 9))
fit44 = Arima(datats, order = c(6, 1, 1))
fit45 = Arima(datats, order = c(6, 1, 2))
fit46 = Arima(datats, order = c(6, 1, 3))
fit47 = Arima(datats, order = c(6, 1, 4))
fit48 = Arima(datats, order = c(6, 1, 5))
fit49 = Arima(datats, order = c(6, 1, 6))
fit50 = Arima(datats, order = c(6, 1, 7))
fit51 = Arima(datats, order = c(6, 1, 8))
fit52 = Arima(datats, order = c(6, 1, 9))
fit53 = Arima(datats, order = c(7, 1, 1))
fit54 = Arima(datats, order = c(7, 1, 2))
fit55 = Arima(datats, order = c(7, 1, 3))
fit56 = Arima(datats, order = c(7, 1, 4))
fit57 = Arima(datats, order = c(7, 1, 5))
fit58 = Arima(datats, order = c(7, 1, 6))
fit59 = Arima(datats, order = c(7, 1, 7))
fit60 = Arima(datats, order = c(7, 1, 8))
fit61 = Arima(datats, order = c(7, 1, 9))

COMPARISON OF MODEL EVALUATION (RMSE AND MAD) FOR EACH ARIMA MODEL

#Define the vectors of p, d, and q values to evaluate
p_values <- 1:7
d_values <- 1
q_values <- 1:9

#Function to compute RMSE
calculate_rmse <- function(actual, predicted) {
  rmse <- sqrt(mean((actual - predicted)^2))
  return(rmse)
}

#Function to compute MAD (defined here as the mean absolute deviation of the
#fitted values from the mean of the actual series)
calculate_mad <- function(actual, predicted) {
  mad <- mean(abs(mean(actual) - predicted))
  return(mad)
}

#Data frame of all parameter combinations to explore
parameter_combinations <- expand.grid(p = p_values, d = d_values, q = q_values)

#Initialise vectors to store the evaluation results
rmse_values <- numeric(nrow(parameter_combinations))
mad_values <- numeric(nrow(parameter_combinations))

#Loop over every parameter combination
for (i in seq_len(nrow(parameter_combinations))) {
  current_params <- parameter_combinations[i, ]
  
  #Fit an ARIMA model with the current parameters
  current_model <- tryCatch({
    Arima(datats, order = c(current_params$p, current_params$d, current_params$q))
  }, error = function(e) NULL)
  
  #If the model was fitted successfully, compute fitted values and evaluate
  if (!is.null(current_model)) {
    predictions <- fitted(current_model)
    
    #Evaluate the model using RMSE
    rmse_values[i] <- calculate_rmse(datats, predictions)
    
    #Evaluate the model using MAD
    mad_values[i] <- calculate_mad(datats, predictions)
  } else {
    rmse_values[i] <- NA
    mad_values[i] <- NA
  }
}

#Combine the parameter combinations with the RMSE and MAD values
results <- cbind(parameter_combinations, RMSE = rmse_values, MAD = mad_values)

#Display the results
print(results)
##    p d q     RMSE      MAD
## 1  1 1 1 69.00212 33.95178
## 2  2 1 1 68.98947 34.10615
## 3  3 1 1 68.58773 35.14527
## 4  4 1 1 66.42020 38.98994
## 5  5 1 1 64.34061 41.27053
## 6  6 1 1 64.31875 41.49869
## 7  7 1 1 64.07352 41.81151
## 8  1 1 2 68.99565 34.03410
## 9  2 1 2 68.64805 35.77672
## 10 3 1 2 68.03790 36.55480
## 11 4 1 2 61.31408 46.77781
## 12 5 1 2 64.23973 41.70429
## 13 6 1 2 58.98696 46.59279
## 14 7 1 2 61.59347 42.27346
## 15 1 1 3 68.95299 33.96865
## 16 2 1 3 59.62932 42.04598
## 17 3 1 3 68.52314 35.98390
## 18 4 1 3 58.46399 44.15657
## 19 5 1 3 61.93980 41.66188
## 20 6 1 3 59.60236 42.09288
## 21 7 1 3 59.55278 40.36973
## 22 1 1 4 68.59765 34.66014
## 23 2 1 4 65.19517 36.74222
## 24 3 1 4 59.02980 43.56929
## 25 4 1 4 58.38722 44.01002
## 26 5 1 4 58.31763 46.93744
## 27 6 1 4 57.92480 42.82780
## 28 7 1 4 58.44944 37.21757
## 29 1 1 5 66.75392 38.78541
## 30 2 1 5 61.89691 39.81516
## 31 3 1 5 58.73291 43.69367
## 32 4 1 5 56.46450 44.22608
## 33 5 1 5 53.91328 42.73758
## 34 6 1 5 54.19057 41.63036
## 35 7 1 5 54.23143 41.35103
## 36 1 1 6 65.63158 40.29717
## 37 2 1 6 61.42491 39.48057
## 38 3 1 6 64.84459 36.61325
## 39 4 1 6       NA       NA
## 40 5 1 6 54.70048 42.87284
## 41 6 1 6 54.88826 42.50198
## 42 7 1 6 54.17904 41.44758
## 43 1 1 7 65.92893 33.34122
## 44 2 1 7 60.28051 41.28544
## 45 3 1 7 57.45271 42.72832
## 46 4 1 7 54.29460 41.47902
## 47 5 1 7       NA       NA
## 48 6 1 7 53.73688 39.25705
## 49 7 1 7 52.19660 43.71067
## 50 1 1 8 66.04233 35.30986
## 51 2 1 8 63.74913 39.83803
## 52 3 1 8 57.42767 43.01071
## 53 4 1 8 55.87842 42.93365
## 54 5 1 8 54.02291 41.92867
## 55 6 1 8 54.13981 39.36167
## 56 7 1 8 53.46167 39.28114
## 57 1 1 9 61.93023 48.67402
## 58 2 1 9 56.23922 44.25212
## 59 3 1 9 57.47676 43.31299
## 60 4 1 9 55.89215 42.72536
## 61 5 1 9 53.85237 41.97292
## 62 6 1 9 53.54824 39.30829
## 63 7 1 9 53.46338 38.86756
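
To make the selection step explicit (a sketch, not part of the original code), the row of results with the smallest in-sample RMSE can be extracted directly, and the built-in training-set measures offer a cross-check; note that the Metrics package masks forecast::accuracy, so the namespace is written out.

#Combination with the smallest in-sample RMSE
results[which.min(results$RMSE), ]

#Built-in training-set error measures for the selected ARIMA(7, 1, 7) model
forecast::accuracy(fit59)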

ARIMA MODEL VERIFICATION

  1. The best model is fit59, ARIMA(7, 1, 7), because it has the smallest RMSE of all the candidate models, RMSE = 52.19, with a MAD of 43.71; on this basis ARIMA(7, 1, 7) is selected as the best model.

  2. The Box-Pierce test on the residuals (Box.test, reported further below) gives a p-value of 0.9505 > alpha = 0.05, so there is not enough evidence of significant autocorrelation in the residuals.

The p-value of a Ljung-Box (or Box-Pierce) test measures how strong the evidence is against the null hypothesis that there is no autocorrelation in the residuals at the lags considered. The p-value is compared with a pre-specified significance level (usually 0.05 or 0.01) to decide whether there is significant autocorrelation in the residuals.

In general, the rule of thumb is:

H0 : There is no significant autocorrelation in the residuals

H1 : There is significant autocorrelation in the residuals

Decision rule: H0 is accepted if p-value > alpha (5% / 0.05)

Shapiro-Wilk test: H0 : The residuals are normally distributed; H1 : The residuals are not normally distributed

Decision rule: H0 is accepted if p-value > alpha (5% / 0.05)

#Display the Ljung-Box statistics plot (tsdiag)
tsdiag(fit59)

#Residual diagnostic plots, including the histogram (checkresiduals)
checkresiduals(fit59)

## 
##  Ljung-Box test
## 
## data:  Residuals from ARIMA(7,1,7)
## Q* = 21.252, df = 10, p-value = 0.01941
## 
## Model df: 14.   Total lags used: 24
fit59
## Series: datats 
## ARIMA(7,1,7) 
## 
## Coefficients:
##           ar1      ar2      ar3      ar4      ar5      ar6      ar7     ma1
##       -0.9546  -1.9540  -1.1615  -1.8476  -0.9949  -0.8807  -0.1771  0.3097
## s.e.   0.1311   0.1559   0.2499   0.1784   0.2412   0.1492   0.1204  0.1150
##          ma2      ma3     ma4      ma5     ma6      ma7
##       1.5048  -0.2249  1.1002  -0.7525  0.1371  -0.7579
## s.e.  0.1392   0.2034  0.1974   0.2128  0.1389   0.1171
## 
## sigma^2 = 3114:  log likelihood = -646.88
## AIC=1323.75   AICc=1328.41   BIC=1365.44
fit59$fitted
##            Jan       Feb       Mar       Apr       May       Jun       Jul
## 2014 217.12341 188.64346 155.96108 143.35014 149.77260 170.65723 196.37964
## 2015 226.26696 206.74926 152.90949 139.50340 147.95554 195.40677 243.91167
## 2016 256.65516 220.42719 157.67768 155.74495 152.67146 220.23902 265.14981
## 2017 279.77154 223.22323 170.70326 187.73648 176.16251 240.79934 332.08094
## 2018 294.32683 199.61530 177.66842 176.25248 185.45276 305.21971 316.39054
## 2019 277.65272 171.18981 219.45683 165.28300 207.50636 327.76479 257.12723
## 2020 262.01042 222.79981 210.14466 121.74661 116.56461 175.37040  93.34808
## 2021  74.72059  91.91920 126.57156 110.94276 141.24255 115.78648 126.01461
## 2022 110.27235 169.40381  80.56310 102.13159 196.51963 193.97273 208.85992
## 2023 187.16920 145.91937 130.90205 159.72492 197.41261 228.95425 198.63002
##            Aug       Sep       Oct       Nov       Dec
## 2014 192.22985 180.01296 157.85238 136.48131 174.01156
## 2015 217.54216 177.38903 147.16506 158.87839 198.66146
## 2016 197.92367 187.68716 164.88456 164.97350 216.51399
## 2017 247.48438 168.40184 185.06238 176.91045 246.21262
## 2018 226.63838 209.31014 177.70772 185.05419 331.21227
## 2019 237.38298 229.89655 164.03867 213.42206 316.93893
## 2020  52.87310 145.78922 122.03840 107.63373 144.46159
## 2021 125.93813  21.76303  55.44547 177.89015 122.63757
## 2022 165.76630 137.72287 130.45053 168.20171 200.00299
## 2023 137.74477 126.98658 170.57952 191.69022 177.07798
summary(fit59)
## Series: datats 
## ARIMA(7,1,7) 
## 
## Coefficients:
##           ar1      ar2      ar3      ar4      ar5      ar6      ar7     ma1
##       -0.9546  -1.9540  -1.1615  -1.8476  -0.9949  -0.8807  -0.1771  0.3097
## s.e.   0.1311   0.1559   0.2499   0.1784   0.2412   0.1492   0.1204  0.1150
##          ma2      ma3     ma4      ma5     ma6      ma7
##       1.5048  -0.2249  1.1002  -0.7525  0.1371  -0.7579
## s.e.  0.1392   0.2034  0.1974   0.2128  0.1389   0.1171
## 
## sigma^2 = 3114:  log likelihood = -646.88
## AIC=1323.75   AICc=1328.41   BIC=1365.44
## 
## Training set error measures:
##                      ME    RMSE      MAE  MPE MAPE      MASE         ACF1
## Training set -0.5548457 52.1966 38.82733 -Inf  Inf 0.7657792 -0.005665773
#Extract the residuals from the model
residual <- residuals(fit59)
residual
##               Jan          Feb          Mar          Apr          May
## 2014    0.2173405  -54.4458400  -23.8172209   -9.2829890    5.5882670
## 2015   -1.0626088  -57.9574583   -2.6730091   16.4901907   50.5665029
## 2016   10.4571758  -53.2985004    6.8458752   -0.3422442   75.8148596
## 2017   23.3983810  -48.8271317   12.0752917   25.1461205   27.7492394
## 2018  -26.4555901  -14.0001044   20.0991200   44.8131179    7.8190693
## 2019  -24.3169786   16.8447629  -15.3416651   62.2164549  -80.2900080
## 2020  -15.1076036  -47.5571225  -76.8067405 -121.7466062 -116.5646080
## 2021   38.3590306  -16.3017392  -11.7499567   -9.0261297   74.6231508
## 2022   88.6396891  -22.2542257   70.0821816  -37.0700828  143.2302737
## 2023    7.9897083    5.7513229   13.7907320   70.8873113   11.5323632
##               Jun          Jul          Aug          Sep          Oct
## 2014   -1.4827667   61.5292632   48.4047280  -53.6207102  -11.6538860
## 2015  -39.6730188  100.1246644  -32.7693268  -20.1280974   12.7880606
## 2016 -102.7060488  109.7875215  -34.5039668  -11.8918240   11.0159839
## 2017   85.1531123  -14.7892790  -73.9303571   33.1273176  -18.8967966
## 2018  115.0171326  -38.8981940  -33.7653612    3.7696589    4.4460650
## 2019  111.4390347   31.2267207  -55.0590952  -30.6504759   35.9988312
## 2020 -175.3703967  -43.5384453  119.4028410  -25.6726064   10.9653625
## 2021   40.0371363 -107.6843115 -112.7093711   51.8304478   73.8062188
## 2022   18.7641941   -5.2279014  -25.0396168    3.3161318    5.4979895
## 2023  -36.1045698   -5.0944930  -21.6973618   20.2343408  -26.2950774
##               Nov          Dec
## 2014   12.9442603   41.1976444
## 2015  -12.4782523   33.3148285
## 2016   -3.0197985   40.5988283
## 2017  -33.4148101    8.6030015
## 2018    6.3589791  -51.7556545
## 2019  -13.2920985   12.7594175
## 2020   18.7149979  -39.9340835
## 2021  -51.4702118   41.7369991
## 2022  -44.4277265   -4.8440758
## 2023  -63.1445098   41.4100056
#Forecast with the ARIMA(7, 1, 7) model and plot it
df <- forecast(fit59)
plot(df)

#p-value from the Box-Pierce test (the default of Box.test) on the residuals
Box.test(df$residuals)
## 
##  Box-Pierce test
## 
## data:  df$residuals
## X-squared = 0.0038521, df = 1, p-value = 0.9505
#Shapiro-Wilk test on the residuals
shapiro.test(residuals(fit59))
## 
##  Shapiro-Wilk normality test
## 
## data:  residuals(fit59)
## W = 0.98131, p-value = 0.09433
#Histogram
hist(df$residuals)

#Plot comparing the fitted model with the actual (transformed) data
fitted_model2b <- (fitted.values(fit59))
plot(fitted_model2b, type = "l", col = "red", 
     ylim = range(fitted_model2b, datats), 
     xlab = "Tahun", ylab = "Jumlah Pengunjung", main = "Fitted vs True (Setelah Transformasi Data) Model ARIMA")
lines(datats, col = "blue")
#Add axis ticks for the years
axis(1, at = seq(2014, 2023, by = 1), labels = seq(2014, 2023, by = 1))
#Add a legend (red = fitted values, blue = actual series)
legend("topright", legend = c("Prediksi", "Aktual"), 
       col = c("red", "blue"), lty = 1, cex = 0.8)

#In-sample errors: actual (transformed) data minus fitted values
se2 <- datats - fitted_model2b
se2
##               Jan          Feb          Mar          Apr          May
## 2014    0.2173405  -54.4458400  -23.8172209   -9.2829890    5.5882670
## 2015   -1.0626088  -57.9574583   -2.6730091   16.4901907   50.5665029
## 2016   10.4571758  -53.2985004    6.8458752   -0.3422442   75.8148596
## 2017   23.3983810  -48.8271317   12.0752917   25.1461205   27.7492394
## 2018  -26.4555901  -14.0001044   20.0991200   44.8131179    7.8190693
## 2019  -24.3169786   16.8447629  -15.3416651   62.2164549  -80.2900080
## 2020  -15.1076036  -47.5571225  -76.8067405 -121.7466062 -116.5646080
## 2021   38.3590306  -16.3017392  -11.7499567   -9.0261297   74.6231508
## 2022   88.6396891  -22.2542257   70.0821816  -37.0700828  143.2302737
## 2023    7.9897083    5.7513229   13.7907320   70.8873113   11.5323632
##               Jun          Jul          Aug          Sep          Oct
## 2014   -1.4827667   61.5292632   48.4047280  -53.6207102  -11.6538860
## 2015  -39.6730188  100.1246644  -32.7693268  -20.1280974   12.7880606
## 2016 -102.7060488  109.7875215  -34.5039668  -11.8918240   11.0159839
## 2017   85.1531123  -14.7892790  -73.9303571   33.1273176  -18.8967966
## 2018  115.0171326  -38.8981940  -33.7653612    3.7696589    4.4460650
## 2019  111.4390347   31.2267207  -55.0590952  -30.6504759   35.9988312
## 2020 -175.3703967  -43.5384453  119.4028410  -25.6726064   10.9653625
## 2021   40.0371363 -107.6843115 -112.7093711   51.8304478   73.8062188
## 2022   18.7641941   -5.2279014  -25.0396168    3.3161318    5.4979895
## 2023  -36.1045698   -5.0944930  -21.6973618   20.2343408  -26.2950774
##               Nov          Dec
## 2014   12.9442603   41.1976444
## 2015  -12.4782523   33.3148285
## 2016   -3.0197985   40.5988283
## 2017  -33.4148101    8.6030015
## 2018    6.3589791  -51.7556545
## 2019  -13.2920985   12.7594175
## 2020   18.7149979  -39.9340835
## 2021  -51.4702118   41.7369991
## 2022  -44.4277265   -4.8440758
## 2023  -63.1445098   41.4100056
#In-sample RMSE of the ARIMA(7, 1, 7) model
se_t2 <- sqrt(mean((se2)^2))
se_t2
## [1] 52.1966

Conclusions:

  1. In the Ljung-Box statistics plot, none of the lags fall below the 0.05 line, and the Box-Pierce test on the residuals gives a p-value of 0.9505 > 0.05, so H0 is accepted: there is not enough evidence of significant autocorrelation in the residuals. The residuals of the ARIMA(7, 1, 7) model can therefore be regarded as white noise.

  2. The histogram of the ARIMA(7, 1, 7) residuals looks approximately normal, and the Shapiro-Wilk test gives a p-value of 0.094 > 0.05, so H0 is accepted: the residuals of the ARIMA(7, 1, 7) model are normally distributed.

  3. It can therefore be concluded that ARIMA(7, 1, 7) is a suitable model for forecasting the number of visitors to Lokawisata Baturraden, since its residuals are white noise and normally distributed.

FORECASTING WITH THE ARIMA MODEL

  1. For the additive HWES model, the fitted values for 2014-2023 differ from the actual visitor numbers by an error of 52.33 in terms of MAD; the fitted series tracks the actual data reasonably closely.

  2. For the ARIMA(7, 1, 7) model, the fitted values for 2014-2023 differ from the actual data by an error of 43.71 in terms of MAD; this fitted series also tracks the actual data closely.

  3. From the comparison above, the best model is ARIMA(7, 1, 7): its RMSE (52.19) and MAD (43.71) are smaller than those of the additive HWES model, and its fitted values follow the actual data most closely. (A compact side-by-side comparison is sketched below.)
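
A compact side-by-side comparison of the two models, using the in-sample RMSE values computed above (se_t and se_t2) and the MAD values reported in the respective grid searches (the name comparison is illustrative):

#Side-by-side comparison of the two candidate models
comparison <- data.frame(
  Model = c("HWES Aditif (0.2, 0.1, 0.2)", "ARIMA(7, 1, 7)"),
  RMSE  = c(se_t, se_t2),
  MAD   = c(52.33, 43.71)   #MAD values reported above
)
comparison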

#Forecast for the next one-year period (2024)
b <- predict(fit59, n.ahead = 12)
b
## $pred
##           Jan      Feb      Mar      Apr      May      Jun      Jul      Aug
## 2024 176.6889 136.6339 130.7420 183.4703 173.4229 178.5206 172.7962 131.0909
##           Sep      Oct      Nov      Dec
## 2024 154.5536 175.4812 163.1146 186.8139
## 
## $se
##           Jan      Feb      Mar      Apr      May      Jun      Jul      Aug
## 2024 56.71966 60.11930 66.54118 67.79731 68.43032 68.48766 69.58702 72.73378
##           Sep      Oct      Nov      Dec
## 2024 76.23064 79.01868 79.12170 79.16692
#Forecast values (copied from the prediction output above)
pred <- c(176.6889, 136.6339, 130.7420, 183.4703, 173.4229, 178.5206, 
          172.7962, 131.0909, 154.5536, 175.4812, 163.1146, 186.8139)

#Month names
months <- c("Jan", "Feb", "Mar", "Apr", "May", "Jun", 
            "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")

#Plot the forecasts
plot(pred, type = "o", xaxt = "n", xlab = "Bulan", ylab = "Jumlah Pengunjung", 
     main = "Peramalan Jumlah Pengunjung Tahun 2024")

#Add the x-axis with month names
axis(1, at = 1:12, labels = months)
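
Equivalently (a small alternative sketch), the plot can be drawn directly from the prediction object, which avoids re-typing the forecast values:

#Plot taken directly from the prediction object
plot(b$pred, type = "o", xlab = "Bulan", ylab = "Jumlah Pengunjung",
     main = "Peramalan Jumlah Pengunjung Tahun 2024")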

Conclusion:

The 2024 forecasts of visitor numbers at Lokawisata Baturraden from the ARIMA(7, 1, 7) model rise and fall in a fairly stable and consistent pattern from month to month, and these increases and decreases are quite pronounced, following the seasonal behaviour seen in the historical data.