Input Library

library(readxl)
## Warning: package 'readxl' was built under R version 4.3.3
library(tidymodels)
## Warning: package 'tidymodels' was built under R version 4.3.3
## ── Attaching packages ────────────────────────────────────── tidymodels 1.2.0 ──
## ✔ broom        1.0.7     ✔ recipes      1.1.0
## ✔ dials        1.3.0     ✔ rsample      1.2.1
## ✔ dplyr        1.1.4     ✔ tibble       3.2.1
## ✔ ggplot2      3.5.1     ✔ tidyr        1.3.1
## ✔ infer        1.0.7     ✔ tune         1.2.1
## ✔ modeldata    1.4.0     ✔ workflows    1.1.4
## ✔ parsnip      1.2.1     ✔ workflowsets 1.1.0
## ✔ purrr        1.0.2     ✔ yardstick    1.3.1
## Warning: package 'broom' was built under R version 4.3.3
## Warning: package 'dials' was built under R version 4.3.3
## Warning: package 'scales' was built under R version 4.3.3
## Warning: package 'dplyr' was built under R version 4.3.3
## Warning: package 'ggplot2' was built under R version 4.3.3
## Warning: package 'infer' was built under R version 4.3.3
## Warning: package 'modeldata' was built under R version 4.3.3
## Warning: package 'parsnip' was built under R version 4.3.3
## Warning: package 'purrr' was built under R version 4.3.2
## Warning: package 'recipes' was built under R version 4.3.3
## Warning: package 'rsample' was built under R version 4.3.3
## Warning: package 'tibble' was built under R version 4.3.2
## Warning: package 'tidyr' was built under R version 4.3.3
## Warning: package 'tune' was built under R version 4.3.3
## Warning: package 'workflows' was built under R version 4.3.3
## Warning: package 'workflowsets' was built under R version 4.3.3
## Warning: package 'yardstick' was built under R version 4.3.3
## ── Conflicts ───────────────────────────────────────── tidymodels_conflicts() ──
## ✖ purrr::discard() masks scales::discard()
## ✖ dplyr::filter()  masks stats::filter()
## ✖ dplyr::lag()     masks stats::lag()
## ✖ recipes::step()  masks stats::step()
## • Dig deeper into tidy modeling with R at https://www.tmwr.org
library(tidyverse)
## Warning: package 'tidyverse' was built under R version 4.3.2
## Warning: package 'stringr' was built under R version 4.3.2
## Warning: package 'lubridate' was built under R version 4.3.2
## ── Attaching core tidyverse packages ──────────────────────── tidyverse 2.0.0 ──
## ✔ forcats   1.0.0     ✔ readr     2.1.4
## ✔ lubridate 1.9.3     ✔ stringr   1.5.0
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ readr::col_factor() masks scales::col_factor()
## ✖ purrr::discard()    masks scales::discard()
## ✖ dplyr::filter()     masks stats::filter()
## ✖ stringr::fixed()    masks recipes::fixed()
## ✖ dplyr::lag()        masks stats::lag()
## ✖ readr::spec()       masks yardstick::spec()
## ℹ Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors
library(mlr3verse)
## Warning: package 'mlr3verse' was built under R version 4.3.3
## Loading required package: mlr3
## Warning: package 'mlr3' was built under R version 4.3.3
## 
## Attaching package: 'mlr3verse'
## 
## The following object is masked from 'package:tune':
## 
##     tune
## 
## The following object is masked from 'package:parsnip':
## 
##     tune
library(rpart) # Recursive partitioning (decision trees)
## Warning: package 'rpart' was built under R version 4.3.3
## 
## Attaching package: 'rpart'
## 
## The following object is masked from 'package:dials':
## 
##     prune
library(rpart.plot) # Plotting rpart trees
## Warning: package 'rpart.plot' was built under R version 4.3.3
library(dplyr) # %>%
library(ranger)
## Warning: package 'ranger' was built under R version 4.3.2
library(cowplot)
## Warning: package 'cowplot' was built under R version 4.3.3
## 
## Attaching package: 'cowplot'
## 
## The following object is masked from 'package:lubridate':
## 
##     stamp
library(caret)
## Warning: package 'caret' was built under R version 4.3.2
## Loading required package: lattice
## 
## Attaching package: 'caret'
## 
## The following objects are masked from 'package:yardstick':
## 
##     precision, recall, sensitivity, specificity
## 
## The following object is masked from 'package:purrr':
## 
##     lift
library(ROSE)
## Warning: package 'ROSE' was built under R version 4.3.2
## Loaded ROSE 0.0-4
library(themis) # SMOTE
## Warning: package 'themis' was built under R version 4.3.2
library(brulee)
## Warning: package 'brulee' was built under R version 4.3.3
library(DataExplorer)
## Warning: package 'DataExplorer' was built under R version 4.3.2
library(finetune)
## Warning: package 'finetune' was built under R version 4.3.3
library(vip)
## Warning: package 'vip' was built under R version 4.3.2
## 
## Attaching package: 'vip'
## 
## The following object is masked from 'package:utils':
## 
##     vi
library(GGally) # GGPAIRS
## Warning: package 'GGally' was built under R version 4.3.3
## Registered S3 method overwritten by 'GGally':
##   method from   
##   +.gg   ggplot2

To build the random forest model, the first step is to load the data from the CSV file.

Input Data

data <- read.csv("tugas sainsdata 23.csv", sep = ";", header = TRUE)
data$jenis.kelamin <- as.factor(data$jenis.kelamin)
data$pendidikan <- as.factor(data$pendidikan)
data$promo <- as.factor(data$promo)
data$cabang  <- as.factor(data$cabang)

The categorical columns and the response variable (promo) are converted to factors; the ID column (no.sampel) is not used as a predictor and is excluded in the analyses below.
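A minimal sketch of an explicit working copy without the ID column (the name data_model and the explicit drop are assumptions; the report itself keeps no.sampel in data and excludes it where needed):

# Sketch (assumption): a working copy without the ID column
data_model <- data %>%
  select(-no.sampel) %>%            # drop the running ID
  mutate(promo = as.factor(promo))  # response as factor (already done above)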

Exploratory Data Analysis

Descriptive Statistics

library(skimr)
## Warning: package 'skimr' was built under R version 4.3.2
## 
## Attaching package: 'skimr'
## The following object is masked from 'package:mlr3':
## 
##     partition
skim_without_charts(data = data)
Data summary
Name data
Number of rows 358
Number of columns 14
_______________________
Column type frequency:
factor 4
numeric 10
________________________
Group variables None

Variable type: factor

skim_variable n_missing complete_rate ordered n_unique top_counts
cabang 0 1 FALSE 14 9: 36, 3: 32, 10: 32, 7: 31
jenis.kelamin 0 1 FALSE 2 2: 206, 1: 152
pendidikan 0 1 FALSE 4 3: 156, 4: 119, 2: 71, 1: 12
promo 0 1 FALSE 2 0: 239, 1: 119

Variable type: numeric

skim_variable n_missing complete_rate mean sd p0 p25 p50 p75 p100
no.sampel 0 1 179.50 103.49 1.00 90.25 179.50 268.75 358.00
usia 0 1 40.13 5.07 26.00 37.00 40.00 44.00 56.00
frekuensi.fashion 0 1 3.06 1.52 0.00 2.00 3.00 4.00 8.00
nilai.fashion 0 1 0.79 0.52 0.03 0.40 0.68 1.05 2.63
frekuensi.footwear 0 1 3.07 1.63 0.00 2.00 3.00 4.00 8.00
nilai.footwear 0 1 0.85 0.61 0.03 0.41 0.69 1.11 3.55
frekuensi.lainnya 0 1 2.78 1.49 0.00 2.00 3.00 4.00 7.00
nilai.lainnya 0 1 0.85 0.58 0.02 0.43 0.73 1.11 3.03
total.nilai.tunai 0 1 2.24 3.43 0.00 0.00 0.68 3.04 23.02
lama.member 0 1 25.85 14.41 1.00 14.00 25.00 38.00 51.00

Observations:

  1. The data contain 358 rows and 14 columns (variables): 10 numeric variables, 3 categorical (factor) predictors, and 1 factor response (promo).

  2. None of the variables contain missing values.

  3. The response promo appears to be imbalanced, with 239 observations in class 0 and 119 in class 1 (see the quick check after this list).

  4. Judging from the minimum values, none of the predictors take implausible values (for example, negative values where the variable represents a duration, an amount, a frequency, or a percentage).

  5. Several predictors contain values of 0.
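A quick check of the class balance noted in observation 3 (a base-R sketch):

table(data$promo)              # counts per class: 239 vs 119
prop.table(table(data$promo))  # proportions: roughly 0.67 vs 0.33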

Data Visualization

plot_intro(data = data,
           ggtheme = theme_classic(),
           theme_config = list(axis.line=element_blank(),
                               axis.ticks=element_blank(),
                               axis.text.x=element_blank(),
                               axis.title=element_blank()
                               )  
           )

Duplicate Data

any(duplicated(data))
## [1] FALSE

Univariate Analysis

Visualization of the Response Variable

a <- data %>% 
  count(promo) %>% 
  mutate(percent = n * 100 / sum(n),
         label = str_c(round(percent, 2), "%")) %>% 
  ggplot(aes(x = "", y = n, fill = promo)) +
  geom_col() +
  geom_text(aes(label = label),
            position = position_stack(vjust = 0.5)) +
  coord_polar(theta = "y") +
  theme_cowplot()
ggsave("Imbalance.png", a)
## Saving 7 x 5 in image

The response classes are imbalanced, as shown in the saved pie chart.

KDE, Histograms, and Boxplots for Each Predictor

# KDE
a <- plot_density(data,
             ggtheme = theme_light(),
             geom_density_args = list("fill" = "blue", "alpha" = 0.6),
             ncol=3)

Boxplot

# Boxplot of each numeric predictor (the ID column and factor columns are skipped)
tmp <- par(mfrow = c(3, 4))

num_cols <- setdiff(colnames(data)[sapply(data, is.numeric)], "no.sampel")
for (i in num_cols) {
  boxplot(data[[i]], horizontal = TRUE, col = 'red')
  title(i)
}

par(tmp)

There are many outliers in nilai.fashion, nilai.footwear, nilai.lainnya, and total.nilai.tunai.
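A rough way to quantify those outliers is the 1.5×IQR rule; a minimal sketch (the name value_cols is an assumption):

# Count observations outside Q1 - 1.5*IQR and Q3 + 1.5*IQR for the value columns
value_cols <- c("nilai.fashion", "nilai.footwear", "nilai.lainnya", "total.nilai.tunai")
sapply(data[value_cols], function(x) {
  q <- quantile(x, c(0.25, 0.75))
  iqr <- diff(q)
  sum(x < q[1] - 1.5 * iqr | x > q[2] + 1.5 * iqr)
})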

Density Plots by Promo

x1_y <- ggplot(data, aes(x = cabang, fill = promo, color = promo)) +
  geom_density(kernel = "gaussian", color= "black", alpha = 0.8) +
  labs(title = "Cabang vs Promo") +  
  scale_fill_manual(values = c("0" = "blue", "1" = "red")) + 
  theme_cowplot()
x1_y

x2_y<- ggplot(data, aes(x = jenis.kelamin, fill = promo, color = promo)) +
  geom_density(kernel = "gaussian", color= "black", alpha = 0.8) +
  labs(title = "Jenis Kelamin vs Promo") +  
  scale_fill_manual(values = c("0" = "blue", "1" = "red")) + 
  theme_cowplot()
x2_y

x3_y<- ggplot(data, aes(x = pendidikan, fill = promo, color = promo)) +
  geom_density(kernel = "gaussian", color= "black", alpha = 0.8) +
  labs(title = "Pendidikan vs Promo") +  
  scale_fill_manual(values = c("0" = "blue", "1" = "red")) + 
  theme_cowplot()
x3_y

x4_y<- ggplot(data, aes(x = frekuensi.fashion, fill = promo, color = promo)) +
  geom_density(kernel = "gaussian", color= "black", alpha = 0.8) +
  labs(title = "Frekuensi Fashion vs Promo") +  
  scale_fill_manual(values = c("0" = "blue", "1" = "red")) + 
  theme_cowplot()
x4_y

x5_y<- ggplot(data, aes(x = frekuensi.footwear, fill = promo, color = promo)) +
  geom_density(kernel = "gaussian", color= "black", alpha = 0.8) +
  labs(title = "Frekuensi Footwear vs Promo") +  
  scale_fill_manual(values = c("0" = "blue", "1" = "red")) + 
  theme_cowplot()
x5_y

x6_y<- ggplot(data, aes(x = nilai.footwear, fill = promo, color = promo)) +
  geom_density(kernel = "gaussian", color= "black", alpha = 0.8) +
  labs(title = "Nilai Footwear vs Promo") +  
  scale_fill_manual(values = c("0" = "blue", "1" = "red")) + 
  theme_cowplot()
x6_y

x7_y<- ggplot(data, aes(x = frekuensi.lainnya, fill = promo, color = promo)) +
  geom_density(kernel = "gaussian", color= "black", alpha = 0.8) +
  labs(title = "Frekuensi Lainnya vs Promo") +  
  scale_fill_manual(values = c("0" = "blue", "1" = "red")) + 
  theme_cowplot()
x7_y

x8_y<- ggplot(data, aes(x = nilai.lainnya, fill = promo, color = promo)) +
  geom_density(kernel = "gaussian", color= "black", alpha = 0.8) +
  labs(title = "Nilai Lainnya vs Promo") +  
  scale_fill_manual(values = c("0" = "blue", "1" = "red")) + 
  theme_cowplot()
x8_y

x9_y<- ggplot(data, aes(x = total.nilai.tunai, fill = promo, color = promo)) +
  geom_density(kernel = "gaussian", color= "black", alpha = 0.8) +
  labs(title = "Total Nilai Tunai vs Promo") +  
  scale_fill_manual(values = c("0" = "blue", "1" = "red")) + 
  theme_cowplot()
x9_y

x10_y<- ggplot(data, aes(x = lama.member, fill = promo, color = promo)) +
  geom_density(kernel = "gaussian", color= "black", alpha = 0.8) +
  labs(title = "Lama Member vs Promo") +  
  scale_fill_manual(values = c("0" = "blue", "1" = "red")) + 
  theme_cowplot()
x10_y
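The blocks above differ only in the x variable; a more compact alternative (a sketch, not the code used in this report; plot_by_promo, num_preds, and density_plots are assumed names) loops over the numeric predictors:

# Sketch: build the density-by-promo plots with one helper function
plot_by_promo <- function(var) {
  ggplot(data, aes(x = .data[[var]], fill = promo)) +
    geom_density(color = "black", alpha = 0.8) +
    labs(title = paste(var, "vs Promo")) +
    scale_fill_manual(values = c("0" = "blue", "1" = "red")) +
    theme_cowplot()
}
num_preds <- setdiff(colnames(data)[sapply(data, is.numeric)], "no.sampel")
density_plots <- lapply(num_preds, plot_by_promo)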

col_panggilan <- colnames(data)
#ggpairs(data, columns=col_panggilan, aes(color = promo,alpha = 0.5))

Multivariate Analysis

Multivariate Boxplots

plot_boxplot(data = data[-1],by = "promo",
         ggtheme = theme_classic(),
         geom_boxplot_args = list(fill='red'))

boxplot(data$usia ~ data$promo, horizontal = TRUE, col = 'red')  # e.g., usia grouped by promo

Correlation Analysis

all_numeric_predictor <- data[c(-1,-2,-3,-5)]
# Pearson correlation heatmap
plot_correlation(data = all_numeric_predictor,
                 type = "all",
                 cor_args = list(method="pearson"),
                 ggtheme = theme_classic(),
                 theme_config = list(legend.position = "none",
                                     axis.text.x=element_text(angle = 90)))

Based on the heatmap, the predictors most associated with promo are cabang, usia, nilai.footwear, frekuensi.footwear, total.nilai.tunai, and lama.member.

Influential predictors by Pearson correlation: cabang, usia, nilai.footwear, frekuensi.footwear, and lama.member. Influential predictors by Spearman correlation: cabang, usia, nilai.footwear, frekuensi.footwear, total.nilai.tunai, and lama.member.
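Only the Pearson heatmap is computed above; the Spearman version referred to in the comparison can be produced the same way (a sketch reusing all_numeric_predictor):

plot_correlation(data = all_numeric_predictor,
                 type = "all",
                 cor_args = list(method = "spearman"),
                 ggtheme = theme_classic(),
                 theme_config = list(legend.position = "none",
                                     axis.text.x = element_text(angle = 90)))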

Data Pre-Processing

Data 1: Anomalies and Feature Extraction

Data Anomalies

Rows are treated as anomalous when a purchase frequency is zero while the corresponding purchase value is positive, or vice versa (positive frequency but zero value).

data_new <- data %>% 
  filter(((frekuensi.fashion > 0  & nilai.fashion ==0) | 
           (frekuensi.fashion == 0  & nilai.fashion > 0)) | 
           ((frekuensi.footwear > 0  & nilai.footwear ==0) | 
              (frekuensi.footwear == 0  & nilai.footwear > 0)) | 
           ((frekuensi.lainnya > 0  & nilai.lainnya ==0) |
              (frekuensi.lainnya == 0  & nilai.lainnya > 0)))
data_new
##    no.sampel cabang jenis.kelamin usia pendidikan frekuensi.fashion
## 1         21      1             1   41          2                 2
## 2         27     10             2   40          3                 0
## 3         49     14             2   28          3                 0
## 4         52     14             2   46          3                 0
## 5         64      9             2   35          3                 3
## 6         94     14             2   45          4                 1
## 7        104      2             1   44          2                 3
## 8        112      1             1   39          4                 0
## 9        137      3             2   40          4                 0
## 10       142      3             2   38          2                 1
## 11       143     10             2   32          3                 0
## 12       153     13             2   37          3                 0
## 13       172      9             2   41          2                 2
## 14       179     11             1   41          2                 0
## 15       181     13             2   44          4                 1
## 16       195      3             2   33          3                 2
## 17       202     10             1   36          4                 3
## 18       217      5             1   44          1                 0
## 19       231      5             1   32          4                 1
## 20       236     12             1   38          2                 0
## 21       243      6             1   35          3                 0
## 22       259      7             2   39          3                 1
## 23       279      3             2   39          2                 1
## 24       284      3             2   43          3                 3
## 25       287      7             2   34          4                 1
## 26       290      6             2   48          2                 0
## 27       291      8             1   44          4                 4
## 28       310      3             2   45          4                 0
## 29       313      8             1   56          3                 4
## 30       320      1             1   47          2                 0
## 31       324     13             2   46          4                 4
## 32       353     12             2   42          3                 2
## 33       355      7             2   37          4                 2
##    nilai.fashion frekuensi.footwear nilai.footwear frekuensi.lainnya
## 1         0.6352                  0         0.6734                 2
## 2         0.9158                  3         0.6022                 0
## 3         1.4866                  4         0.5018                 0
## 4         0.9998                  5         1.0418                 0
## 5         0.6970                  0         0.5288                 1
## 6         0.1066                  0         0.0644                 1
## 7         1.0230                  0         2.4298                 3
## 8         0.0898                  2         0.3694                 0
## 9         1.0758                  3         0.6992                 0
## 10        1.9944                  5         1.6942                 0
## 11        0.5164                  2         2.5292                 0
## 12        0.4632                  4         0.9636                 0
## 13        0.6418                  3         2.4346                 0
## 14        0.3296                  5         0.5542                 0
## 15        0.5928                  1         2.7520                 0
## 16        0.9008                  0         1.2710                 2
## 17        1.3390                  0         1.3960                 3
## 18        0.6590                  4         0.4756                 0
## 19        0.1780                  1         1.7278                 0
## 20        0.4886                  5         0.9914                 0
## 21        0.3790                  5         0.3320                 0
## 22        0.4528                  4         0.4224                 0
## 23        0.3000                  0         0.8818                 0
## 24        0.8956                  0         0.2238                 3
## 25        0.5258                  4         0.3660                 0
## 26        0.1528                  3         0.3956                 0
## 27        0.0676                  0         1.2780                 4
## 28        0.3364                  3         2.7402                 0
## 29        0.2934                  0         0.0874                 4
## 30        0.0592                  2         0.2044                 0
## 31        0.5120                  0         0.9438                 3
## 32        0.3298                  0         1.6796                 1
## 33        0.6226                  0         0.7206                 2
##    nilai.lainnya total.nilai.tunai lama.member promo
## 1         0.9780              2.68           7     0
## 2         1.8056              1.06          33     0
## 3         0.5484              0.00          14     0
## 4         0.2828              2.68          50     0
## 5         1.0264              2.57          44     0
## 6         0.3904              0.07          19     0
## 7         0.8418              2.60          17     0
## 8         1.1160              0.00          28     0
## 9         2.5776              0.17          33     0
## 10        0.6570              0.00          15     1
## 11        0.7262              4.82          41     0
## 12        1.0144              1.95          33     0
## 13        0.8766              7.18          32     0
## 14        1.6476              2.02           1     1
## 15        1.5186              0.00          34     0
## 16        1.8024              2.97          25     0
## 17        1.0828              0.00          16     1
## 18        2.6500              0.00          21     1
## 19        1.3642              1.68          37     1
## 20        1.4048              3.91          21     0
## 21        0.1914              0.50           8     1
## 22        0.6282              0.00          14     1
## 23        0.6910              0.23           3     0
## 24        0.0898              1.68          12     0
## 25        0.3554              0.40          32     1
## 26        1.0428              0.00           4     1
## 27        0.2660              0.61          47     0
## 28        1.2704              6.59          29     0
## 29        1.6668              3.03           5     0
## 30        0.8228              0.00          12     0
## 31        0.9848              4.63          23     0
## 32        0.5102              0.29          45     0
## 33        0.6518              1.83           5     1
data2<- anti_join(data, data_new)
## Joining with `by = join_by(no.sampel, cabang, jenis.kelamin, usia, pendidikan,
## frekuensi.fashion, nilai.fashion, frekuensi.footwear, nilai.footwear,
## frekuensi.lainnya, nilai.lainnya, total.nilai.tunai, lama.member, promo)`
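A quick sanity check on the number of rows removed (a sketch; 33 anomalous rows out of 358 leaves 325):

nrow(data_new)  # anomalous rows: 33
nrow(data2)     # remaining rows: 358 - 33 = 325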

Feature Extraction: Total Purchase Frequency

data2$Total_Frekuensi_Pembelian <- data2$frekuensi.fashion  + data2$frekuensi.lainnya+ data2$frekuensi.footwear
plot_boxplot(data = data2[-1],by = "promo",
         ggtheme = theme_classic(),
         geom_boxplot_args = list(fill='red'))

a <- ggplot(data = data2, aes(x= frekuensi.lainnya, y = promo))+
  geom_boxplot(fill = "red") + theme_classic()
ggsave("Boxplot FE 3.png", a)
## Saving 7 x 5 in image
all_numeric_predictor <- data2[c(-1,-2,-3,-5,-6,-8,-10)]
# Pearson correlation heatmap
a <- plot_correlation(data = all_numeric_predictor,
                 type = "all",
                 cor_args = list(method="pearson"),
                 ggtheme = theme_classic(),
                 theme_config = list(legend.position = "none",
                                     axis.text.x=element_text(angle = 90)))

ggsave("Korelasi.png", a)
## Saving 7 x 5 in image

Data 2: Anomalies and Feature Extraction

Data Anomalies

The anomalous rows identified above (data_new) are removed from this copy as well.

data3 <- data
data3 <- anti_join(data3, data_new)
## Joining with `by = join_by(no.sampel, cabang, jenis.kelamin, usia, pendidikan,
## frekuensi.fashion, nilai.fashion, frekuensi.footwear, nilai.footwear,
## frekuensi.lainnya, nilai.lainnya, total.nilai.tunai, lama.member, promo)`

Feature Extraction: Total Purchase Frequency

data3$Total_Frekuensi_Pembelian <- data3$frekuensi.fashion + data3$frekuensi.lainnya + data3$frekuensi.footwear

Feature Extraction: Discretization of usia and lama.member

diskretasi_usia <- function(){
  for (i in 1:length(data3$usia)){
    x <- data3$usia[i]
    if (x>=25 && x < 36){
      x<- "1"
    }
    else if (x>= 36 && x < 46){
      x <- "2"
    }
    else if (x >= 46){
      x <- "3"
    }
    data3[i,4] <- x
    
  }
  data3$usia <- as.factor(data3$usia)
  return(data3)
}
diskretasi_member <- function(b){
  for (i in 1:length(b[,13])){
    x <- b[i,13]
    if (x>=1 && x < 13){
      x<- "1"
    }
    else if (x>= 13 && x < 25){
      x <- "2"
    }
    else if (x>= 25 && x < 37){
      x <- "3"
    }
    else if (x>= 37 && x < 49){
      x <- "4"
    }
    else if (x>= 49){
      x <- "5"
    }
    b[i,13] <- x
  }
  b[,13] <- as.factor(b[,13])
  return(b)
}
data3_diskret <- diskretasi_usia()
data3_diskret2 <- diskretasi_member(data3_diskret)
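The same binning can be written more compactly with cut(); a minimal sketch assuming identical breakpoints (the name data3_diskret_alt is hypothetical):

# Sketch: bin usia into 3 groups and lama.member into 5 groups with cut()
data3_diskret_alt <- data3 %>%
  mutate(usia = cut(usia, breaks = c(25, 36, 46, Inf),
                    labels = c("1", "2", "3"), right = FALSE),
         lama.member = cut(lama.member, breaks = c(1, 13, 25, 37, 49, Inf),
                           labels = c("1", "2", "3", "4", "5"), right = FALSE))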

Feature Extraction: Non-Cash Purchases

Data 3: Anomalies and Feature Extraction

Data Anomalies

data4 <- data
data4 <- anti_join(data4,data_new)
## Joining with `by = join_by(no.sampel, cabang, jenis.kelamin, usia, pendidikan,
## frekuensi.fashion, nilai.fashion, frekuensi.footwear, nilai.footwear,
## frekuensi.lainnya, nilai.lainnya, total.nilai.tunai, lama.member, promo)`
data4$Total_Frekuensi_Pembelian <- data4$frekuensi.fashion + data4$frekuensi.lainnya + data4$frekuensi.footwear
data4$Total_Rata_Pembelian <- data4$nilai.fashion + data4$nilai.lainnya + data4$nilai.footwear
data_nontunai <- data4 %>% 
  filter(((frekuensi.fashion > 0  | nilai.fashion >0)| 
           (frekuensi.footwear > 0  | nilai.footwear > 0) | 
           ((frekuensi.lainnya > 0  | nilai.lainnya > 0))) & total.nilai.tunai == 0) 
data_nontunai
##     no.sampel cabang jenis.kelamin usia pendidikan frekuensi.fashion
## 1           1     11             2   38          2                 5
## 2           4      2             1   43          3                 6
## 3           8      9             1   42          3                 5
## 4          12      1             2   44          4                 3
## 5          16      2             2   46          2                 4
## 6          23      6             2   40          3                 5
## 7          24      6             2   41          3                 3
## 8          30      9             1   39          1                 2
## 9          31      9             2   40          3                 3
## 10         33      2             1   46          4                 6
## 11         34      7             1   37          3                 4
## 12         36     12             2   43          3                 3
## 13         39     14             1   30          3                 3
## 14         41      6             1   42          3                 1
## 15         44     14             2   44          3                 4
## 16         47      8             1   40          4                 3
## 17         48      7             1   35          3                 5
## 18         51      5             2   43          3                 3
## 19         54      7             1   37          2                 1
## 20         57      2             2   42          2                 3
## 21         58      9             1   40          3                 2
## 22         59      5             2   36          4                 4
## 23         61      2             1   40          2                 4
## 24         63     12             1   36          3                 2
## 25         65      7             1   34          4                 2
## 26         66      6             1   42          4                 3
## 27         67     14             2   39          3                 4
## 28         70     14             1   43          3                 3
## 29         71     14             1   43          4                 3
## 30         72      1             2   38          3                 4
## 31         73     12             2   44          3                 3
## 32         74     14             2   39          3                 5
## 33         75     10             2   42          3                 5
## 34         77      7             2   33          4                 4
## 35         79     10             1   45          4                 1
## 36         80     12             1   32          3                 1
## 37         82     14             1   47          4                 2
## 38         88     14             1   42          4                 3
## 39         89      7             2   40          4                 3
## 40         90      5             1   45          1                 3
## 41         92      7             2   44          3                 5
## 42         93      1             2   35          3                 4
## 43         96      3             2   49          4                 3
## 44         97     13             2   35          3                 1
## 45         98      9             2   43          4                 2
## 46         99      3             2   41          4                 6
## 47        100     12             2   40          3                 4
## 48        101      9             2   46          3                 2
## 49        103      4             1   37          2                 4
## 50        105      4             2   41          2                 3
## 51        106     10             1   34          1                 4
## 52        111     11             2   33          1                 1
## 53        113     11             2   36          2                 6
## 54        114      8             1   40          2                 3
## 55        117      3             1   43          2                 3
## 56        118     14             1   40          4                 4
## 57        121      7             1   45          3                 3
## 58        123      5             2   41          3                 1
## 59        126     12             1   31          4                 3
## 60        128      4             1   43          4                 5
## 61        131      5             1   42          3                 5
## 62        138      8             2   34          4                 3
## 63        139      1             1   37          3                 3
## 64        141     10             2   32          2                 2
## 65        149      2             1   41          4                 4
## 66        150     11             2   30          4                 1
## 67        151      3             2   44          3                 6
## 68        154      2             2   47          4                 4
## 69        158      9             1   46          4                 5
## 70        162      7             1   41          2                 4
## 71        164      9             1   37          4                 3
## 72        165     10             1   36          4                 4
## 73        169      3             1   38          3                 4
## 74        170     12             2   35          3                 4
## 75        174      8             1   33          4                 4
## 76        178      5             1   31          2                 3
## 77        183     14             1   37          3                 3
## 78        184      9             2   45          2                 2
## 79        185      7             1   35          3                 2
## 80        188     11             2   44          3                 3
## 81        189      5             2   37          4                 1
## 82        193      4             1   36          2                 4
## 83        200      8             2   43          2                 4
## 84        201     10             2   38          3                 5
## 85        203      5             1   36          3                 3
## 86        209      9             2   37          4                 5
## 87        211      6             2   35          4                 3
## 88        215      3             2   49          4                 2
## 89        216     10             1   49          4                 3
## 90        218      9             1   45          4                 1
## 91        221     14             1   40          3                 5
## 92        223     14             2   46          3                 7
## 93        224     13             2   46          4                 3
## 94        226     12             2   38          1                 1
## 95        227      8             2   42          4                 1
## 96        230      3             1   42          3                 2
## 97        235      3             2   50          2                 2
## 98        238      5             1   50          4                 5
## 99        242     12             1   44          4                 2
## 100       248      8             2   35          4                 4
## 101       249      2             2   40          2                 3
## 102       251      4             2   45          2                 2
## 103       252      2             2   31          3                 5
## 104       254      5             1   34          4                 1
## 105       256     13             1   30          4                 3
## 106       257     11             2   44          3                 3
## 107       258     10             2   35          2                 2
## 108       263      3             2   40          3                 3
## 109       264      9             2   36          4                 5
## 110       267      9             2   38          3                 2
## 111       270      1             2   42          4                 2
## 112       271     13             2   39          3                 4
## 113       274      1             1   50          2                 5
## 114       275     12             2   37          4                 1
## 115       280      9             1   39          4                 3
## 116       285      9             2   34          2                 2
## 117       286      9             1   48          1                 3
## 118       294      1             1   35          2                 4
## 119       295      8             1   40          4                 2
## 120       296     12             1   29          3                 4
## 121       301      7             1   46          2                 2
## 122       302      8             2   46          2                 3
## 123       304     14             2   42          3                 3
## 124       311     13             2   37          3                 2
## 125       317      7             1   37          4                 5
## 126       319     10             2   35          4                 4
## 127       323      7             2   39          4                 4
## 128       327      7             1   29          3                 1
## 129       332     12             2   37          2                 3
## 130       334     11             2   37          3                 1
## 131       335     12             1   38          3                 5
## 132       339      5             2   42          3                 4
## 133       344      7             2   38          4                 4
## 134       350      8             1   40          2                 4
## 135       352      1             2   42          2                 2
## 136       354     12             1   45          4                 4
## 137       356     13             2   42          3                 4
##     nilai.fashion frekuensi.footwear nilai.footwear frekuensi.lainnya
## 1          1.1588                  1         0.4560                 4
## 2          0.3612                  3         0.5626                 6
## 3          0.3972                  3         0.2280                 4
## 4          0.3032                  3         0.4152                 3
## 5          0.6580                  2         0.6720                 4
## 6          0.9162                  3         2.8932                 5
## 7          1.6652                  2         1.1082                 3
## 8          0.2320                  1         0.4656                 1
## 9          1.1172                  3         0.6006                 3
## 10         0.3978                  5         0.5438                 5
## 11         0.8578                  7         1.1914                 4
## 12         0.7474                  6         0.8420                 3
## 13         0.7080                  2         1.1704                 3
## 14         0.2584                  4         0.5526                 1
## 15         0.4498                  3         0.9920                 3
## 16         0.1692                  3         1.2122                 3
## 17         0.4014                  6         0.3272                 4
## 18         0.5848                  2         0.7110                 3
## 19         0.1878                  4         0.2702                 1
## 20         0.7874                  6         0.2848                 3
## 21         1.2244                  1         0.3646                 2
## 22         0.1094                  3         0.0508                 4
## 23         2.0776                  2         1.7100                 4
## 24         1.1672                  4         1.3962                 1
## 25         0.7948                  2         1.4732                 2
## 26         0.6052                  1         0.8180                 2
## 27         0.4274                  3         0.7756                 3
## 28         0.4596                  3         0.0864                 2
## 29         0.2938                  3         0.8572                 3
## 30         0.2436                  4         0.7096                 2
## 31         1.8560                  1         0.4080                 3
## 32         0.8322                  5         3.5494                 5
## 33         1.0726                  4         0.8582                 5
## 34         0.1618                  2         1.2094                 4
## 35         0.2162                  1         0.4944                 1
## 36         0.7378                  4         0.6298                 1
## 37         0.0824                  2         0.5744                 2
## 38         1.6262                  3         0.7242                 3
## 39         1.1430                  2         1.5044                 3
## 40         2.2716                  2         1.4642                 2
## 41         0.8796                  2         0.7912                 5
## 42         2.2006                  2         0.5302                 4
## 43         1.4324                  2         1.7754                 2
## 44         0.4670                  5         0.0250                 1
## 45         0.8562                  8         0.3494                 2
## 46         2.3006                  3         0.5554                 6
## 47         0.3060                  5         0.6344                 4
## 48         0.7802                  4         0.6124                 1
## 49         0.6044                  2         0.8074                 4
## 50         0.6628                  1         1.0388                 3
## 51         0.5060                  2         0.4764                 4
## 52         1.4026                  2         1.8544                 1
## 53         0.3620                  5         0.3708                 5
## 54         1.0576                  1         0.5406                 3
## 55         0.8782                  3         0.5402                 3
## 56         1.4614                  5         2.2678                 4
## 57         1.0432                  4         0.8360                 3
## 58         0.5476                  3         1.1274                 1
## 59         0.9770                  3         0.7792                 2
## 60         1.0212                  4         0.3192                 5
## 61         0.5828                  2         0.4592                 5
## 62         0.9144                  5         1.3064                 3
## 63         0.6008                  1         1.1428                 2
## 64         0.6992                  5         0.3702                 2
## 65         0.6912                  6         0.3332                 4
## 66         0.5144                  3         1.1400                 1
## 67         0.7406                  4         0.9764                 6
## 68         1.2490                  3         0.4326                 4
## 69         0.3942                  1         0.7114                 4
## 70         0.9178                  3         0.1720                 4
## 71         0.5144                  4         0.4716                 3
## 72         0.0710                  7         0.3342                 4
## 73         1.1700                  7         2.2276                 4
## 74         1.0124                  2         1.0558                 4
## 75         0.3012                  4         0.4540                 4
## 76         1.3982                  1         0.8708                 3
## 77         0.4218                  3         1.2106                 3
## 78         0.8540                  4         1.5140                 2
## 79         0.0436                  4         0.7572                 2
## 80         0.7434                  2         0.4764                 2
## 81         0.0728                  1         0.6946                 1
## 82         0.7052                  4         0.6468                 3
## 83         0.3538                  2         0.3870                 3
## 84         0.9758                  3         0.4550                 5
## 85         1.3438                  2         1.0136                 1
## 86         1.1160                  2         0.4304                 5
## 87         0.8078                  2         0.3832                 3
## 88         1.4352                  4         0.6002                 2
## 89         1.0656                  3         1.5228                 2
## 90         0.3946                  4         0.1444                 1
## 91         0.7428                  4         0.2388                 4
## 92         0.9844                  3         0.8712                 7
## 93         1.7836                  2         0.3928                 3
## 94         0.5232                  3         0.8160                 1
## 95         0.2030                  2         0.5932                 1
## 96         0.1906                  4         0.8268                 2
## 97         0.1494                  5         0.2928                 2
## 98         0.6392                  1         0.2736                 5
## 99         0.7520                  7         1.3686                 2
## 100        0.5990                  2         0.6540                 4
## 101        0.7084                  2         0.4616                 3
## 102        0.3924                  2         0.2432                 2
## 103        0.3202                  4         1.7544                 5
## 104        1.5422                  7         0.6236                 1
## 105        1.4190                  3         0.7962                 3
## 106        0.7750                  3         1.1862                 3
## 107        0.2876                  2         0.3852                 1
## 108        2.5078                  5         0.9228                 3
## 109        1.0838                  4         1.6454                 5
## 110        0.0578                  4         0.2680                 2
## 111        0.8828                  2         0.5552                 2
## 112        0.9876                  2         0.1514                 4
## 113        0.5220                  5         1.6352                 4
## 114        1.4922                  1         0.4800                 1
## 115        0.6192                  4         0.2198                 3
## 116        1.4020                  7         1.3114                 2
## 117        0.3608                  6         0.4002                 3
## 118        0.4302                  3         1.5724                 3
## 119        0.9484                  4         1.3998                 2
## 120        0.1806                  3         0.3066                 4
## 121        0.7358                  2         0.6044                 2
## 122        1.9708                  4         1.9294                 3
## 123        0.9964                  3         1.1062                 3
## 124        0.7728                  3         0.9014                 2
## 125        1.3142                  3         0.8154                 5
## 126        1.5568                  3         0.8174                 4
## 127        0.4994                  1         0.7226                 2
## 128        0.3730                  5         2.0604                 1
## 129        0.9584                  6         0.3758                 3
## 130        1.0348                  2         0.9032                 1
## 131        2.3752                  1         0.2236                 4
## 132        2.2716                  1         1.5996                 4
## 133        0.4612                  1         0.3452                 4
## 134        0.0334                  2         0.4210                 4
## 135        0.2580                  3         1.1848                 2
## 136        0.1342                  4         0.8448                 4
## 137        1.7268                  1         0.2176                 4
##     nilai.lainnya total.nilai.tunai lama.member promo Total_Frekuensi_Pembelian
## 1          0.9990                 0          18     0                        10
## 2          0.4118                 0           9     1                        15
## 3          1.2490                 0          27     0                        12
## 4          0.3550                 0          31     1                         9
## 5          0.7420                 0           1     1                        10
## 6          0.2850                 0          48     1                        13
## 7          1.8380                 0          35     1                         8
## 8          0.0480                 0          12     0                         4
## 9          1.0814                 0           6     0                         9
## 10         2.8132                 0          30     0                        16
## 11         0.5278                 0          28     1                        15
## 12         0.6830                 0          20     1                        12
## 13         1.0532                 0          25     0                         8
## 14         1.4986                 0          18     1                         6
## 15         2.1182                 0          10     0                        10
## 16         0.8732                 0          28     0                         9
## 17         0.4470                 0          49     1                        15
## 18         0.9744                 0          29     0                         8
## 19         0.7448                 0          13     1                         6
## 20         0.4536                 0          45     0                        12
## 21         1.3284                 0          47     0                         5
## 22         0.5374                 0          26     0                        11
## 23         0.6404                 0          39     1                        10
## 24         1.3454                 0          35     0                         7
## 25         0.5674                 0          46     0                         6
## 26         1.3472                 0          33     0                         6
## 27         1.0402                 0          42     0                        10
## 28         0.4000                 0          14     0                         8
## 29         0.7194                 0           5     0                         9
## 30         0.5942                 0          27     0                        10
## 31         0.6004                 0          19     0                         7
## 32         1.0716                 0           4     1                        15
## 33         0.7236                 0          45     0                        14
## 34         1.0066                 0          24     1                        10
## 35         0.3732                 0          40     0                         3
## 36         0.6504                 0          12     0                         6
## 37         0.1142                 0          29     0                         6
## 38         1.2540                 0           5     0                         9
## 39         0.4814                 0          19     1                         8
## 40         1.8594                 0          38     1                         7
## 41         0.2466                 0          15     1                        12
## 42         0.1704                 0           6     0                        10
## 43         0.5376                 0           8     0                         7
## 44         0.9202                 0          33     0                         7
## 45         0.6810                 0          34     0                        12
## 46         0.0490                 0          45     0                        15
## 47         0.1832                 0          15     0                        13
## 48         0.8670                 0          14     1                         7
## 49         1.0746                 0          30     0                        10
## 50         0.1270                 0          45     0                         7
## 51         0.7240                 0          10     0                        10
## 52         1.7536                 0          48     0                         4
## 53         0.3156                 0          15     0                        16
## 54         0.1662                 0          35     0                         7
## 55         0.8304                 0          42     0                         9
## 56         0.7800                 0          32     0                        13
## 57         1.8014                 0          19     1                        10
## 58         0.3632                 0          46     0                         5
## 59         0.8736                 0          44     0                         8
## 60         0.6992                 0          12     0                        14
## 61         0.8610                 0          45     0                        12
## 62         0.8978                 0           4     1                        11
## 63         0.6906                 0           2     0                         6
## 64         0.2854                 0          37     0                         9
## 65         1.9450                 0           6     1                        14
## 66         0.7660                 0          19     0                         5
## 67         1.2314                 0          35     0                        16
## 68         1.2746                 0          23     0                        11
## 69         1.3002                 0          32     1                        10
## 70         3.0334                 0          30     1                        11
## 71         0.4834                 0          10     0                        10
## 72         1.4188                 0          10     1                        15
## 73         0.8754                 0           5     1                        15
## 74         1.3694                 0          18     1                        10
## 75         0.9494                 0          18     0                        12
## 76         1.1386                 0          49     1                         7
## 77         0.4022                 0          38     0                         9
## 78         0.5362                 0          29     1                         8
## 79         0.7624                 0          46     1                         8
## 80         0.5888                 0          15     0                         7
## 81         0.6954                 0          12     0                         3
## 82         0.7014                 0          28     0                        11
## 83         1.4008                 0           9     0                         9
## 84         0.8116                 0          32     0                        13
## 85         0.5770                 0           7     0                         6
## 86         1.1758                 0           3     1                        12
## 87         0.4026                 0          16     1                         8
## 88         1.2034                 0          10     1                         8
## 89         0.6324                 0          46     1                         8
## 90         0.7576                 0          25     0                         6
## 91         0.5100                 0          21     0                        13
## 92         0.2802                 0          48     0                        17
## 93         0.9560                 0          29     0                         8
## 94         1.3788                 0          17     1                         5
## 95         0.5876                 0          19     0                         4
## 96         0.5198                 0          50     0                         8
## 97         0.4562                 0          32     1                         9
## 98         0.8706                 0          13     1                        11
## 99         1.4190                 0          19     0                        11
## 100        1.8842                 0          43     0                        10
## 101        1.3964                 0          44     0                         8
## 102        0.2264                 0          23     0                         6
## 103        1.3592                 0          47     1                        14
## 104        0.8686                 0           5     0                         9
## 105        0.7694                 0          15     1                         9
## 106        1.1726                 0           5     1                         9
## 107        0.4818                 0           4     0                         5
## 108        0.7604                 0          18     0                        11
## 109        0.3928                 0          29     0                        14
## 110        1.1828                 0          21     0                         8
## 111        0.1646                 0          17     0                         6
## 112        1.0740                 0          43     0                        10
## 113        1.9836                 0          22     0                        14
## 114        1.3348                 0          22     1                         3
## 115        0.2798                 0          19     0                        10
## 116        1.0980                 0          28     0                        11
## 117        0.8948                 0          42     0                        12
## 118        0.8962                 0           6     0                        10
## 119        1.1850                 0          37     0                         8
## 120        2.9106                 0          19     0                        11
## 121        0.1858                 0          25     1                         6
## 122        0.1458                 0          35     1                        10
## 123        0.5982                 0          22     0                         9
## 124        0.7122                 0          32     0                         7
## 125        0.1956                 0           2     1                        13
## 126        3.0118                 0          47     0                        11
## 127        0.6204                 0          18     0                         7
## 128        0.5358                 0          46     1                         7
## 129        1.2062                 0          23     0                        12
## 130        0.2020                 0           8     0                         4
## 131        1.0824                 0           4     0                        10
## 132        0.4750                 0          50     0                         9
## 133        0.4640                 0           2     1                         9
## 134        1.7718                 0          15     0                        10
## 135        1.0504                 0           4     1                         7
## 136        0.0224                 0          51     0                        12
## 137        0.8938                 0          45     0                         9
##     Total_Rata_Pembelian
## 1                 2.6138
## 2                 1.3356
## 3                 1.8742
## 4                 1.0734
## 5                 2.0720
## 6                 4.0944
## 7                 4.6114
## 8                 0.7456
## 9                 2.7992
## 10                3.7548
## 11                2.5770
## 12                2.2724
## 13                2.9316
## 14                2.3096
## 15                3.5600
## 16                2.2546
## 17                1.1756
## 18                2.2702
## 19                1.2028
## 20                1.5258
## 21                2.9174
## 22                0.6976
## 23                4.4280
## 24                3.9088
## 25                2.8354
## 26                2.7704
## 27                2.2432
## 28                0.9460
## 29                1.8704
## 30                1.5474
## 31                2.8644
## 32                5.4532
## 33                2.6544
## 34                2.3778
## 35                1.0838
## 36                2.0180
## 37                0.7710
## 38                3.6044
## 39                3.1288
## 40                5.5952
## 41                1.9174
## 42                2.9012
## 43                3.7454
## 44                1.4122
## 45                1.8866
## 46                2.9050
## 47                1.1236
## 48                2.2596
## 49                2.4864
## 50                1.8286
## 51                1.7064
## 52                5.0106
## 53                1.0484
## 54                1.7644
## 55                2.2488
## 56                4.5092
## 57                3.6806
## 58                2.0382
## 59                2.6298
## 60                2.0396
## 61                1.9030
## 62                3.1186
## 63                2.4342
## 64                1.3548
## 65                2.9694
## 66                2.4204
## 67                2.9484
## 68                2.9562
## 69                2.4058
## 70                4.1232
## 71                1.4694
## 72                1.8240
## 73                4.2730
## 74                3.4376
## 75                1.7046
## 76                3.4076
## 77                2.0346
## 78                2.9042
## 79                1.5632
## 80                1.8086
## 81                1.4628
## 82                2.0534
## 83                2.1416
## 84                2.2424
## 85                2.9344
## 86                2.7222
## 87                1.5936
## 88                3.2388
## 89                3.2208
## 90                1.2966
## 91                1.4916
## 92                2.1358
## 93                3.1324
## 94                2.7180
## 95                1.3838
## 96                1.5372
## 97                0.8984
## 98                1.7834
## 99                3.5396
## 100               3.1372
## 101               2.5664
## 102               0.8620
## 103               3.4338
## 104               3.0344
## 105               2.9846
## 106               3.1338
## 107               1.1546
## 108               4.1910
## 109               3.1220
## 110               1.5086
## 111               1.6026
## 112               2.2130
## 113               4.1408
## 114               3.3070
## 115               1.1188
## 116               3.8114
## 117               1.6558
## 118               2.8988
## 119               3.5332
## 120               3.3978
## 121               1.5260
## 122               4.0460
## 123               2.7008
## 124               2.3864
## 125               2.3252
## 126               5.3860
## 127               1.8424
## 128               2.9692
## 129               2.5404
## 130               2.1400
## 131               3.6812
## 132               4.3462
## 133               1.2704
## 134               2.2262
## 135               2.4932
## 136               1.0014
## 137               2.8382
data5 <- anti_join(data4, data_nontunai)
## Joining with `by = join_by(no.sampel, cabang, jenis.kelamin, usia, pendidikan,
## frekuensi.fashion, nilai.fashion, frekuensi.footwear, nilai.footwear,
## frekuensi.lainnya, nilai.lainnya, total.nilai.tunai, lama.member, promo,
## Total_Frekuensi_Pembelian, Total_Rata_Pembelian)`
data5
##     no.sampel cabang jenis.kelamin usia pendidikan frekuensi.fashion
## 1           2      2             2   33          2                 4
## 2           3      7             1   41          3                 5
## 3           5      3             2   37          4                 2
## 4           6      4             2   39          3                 3
## 5           7     11             2   41          3                 4
## 6           9     12             1   45          3                 4
## 7          10     13             1   52          3                 2
## 8          11      3             2   43          3                 5
## 9          13      5             1   41          2                 4
## 10         14     14             2   50          3                 3
## 11         15     10             1   42          3                 2
## 12         17     10             1   44          3                 2
## 13         18      9             2   38          4                 2
## 14         19     14             1   46          3                 4
## 15         20      2             1   40          4                 3
## 16         22     10             1   47          4                 4
## 17         25      3             1   37          4                 4
## 18         26      3             2   39          4                 3
## 19         28      9             1   28          3                 4
## 20         29      2             1   45          1                 5
## 21         32     12             2   37          2                 2
## 22         35      9             2   32          4                 4
## 23         37      7             1   40          2                 2
## 24         38      8             2   47          2                 1
## 25         40      7             1   46          3                 2
## 26         42     10             2   38          2                 4
## 27         43      4             1   33          3                 2
## 28         45      4             2   48          3                 5
## 29         46      6             1   28          4                 1
## 30         50      1             1   44          3                 5
## 31         53     10             2   34          4                 5
## 32         55     13             1   37          2                 2
## 33         56     10             2   39          4                 5
## 34         60      4             2   38          3                 1
## 35         62      1             1   41          4                 1
## 36         68      5             2   42          2                 3
## 37         69      2             2   42          4                 1
## 38         76     10             2   38          2                 4
## 39         78      6             1   40          4                 2
## 40         81      9             1   40          3                 4
## 41         83     14             1   44          3                 3
## 42         84      6             1   39          3                 1
## 43         85     14             1   45          3                 5
## 44         86      7             2   39          3                 3
## 45         87     10             1   38          1                 1
## 46         91      8             2   48          2                 4
## 47         95      3             1   36          3                 2
## 48        102     11             2   39          3                 4
## 49        107      3             2   31          4                 3
## 50        108      2             2   46          4                 4
## 51        109     11             2   39          3                 2
## 52        110      3             2   42          2                 5
## 53        115      1             1   49          4                 1
## 54        116      9             2   36          3                 5
## 55        119      4             2   41          3                 3
## 56        120      6             2   32          3                 3
## 57        122      4             2   35          4                 5
## 58        124      8             1   45          3                 1
## 59        125      9             2   35          4                 1
## 60        127     10             2   42          2                 2
## 61        129      3             1   31          2                 5
## 62        130     13             2   38          4                 4
## 63        132      1             2   44          4                 4
## 64        133     10             1   38          3                 3
## 65        134     11             1   43          3                 5
## 66        135      4             2   39          1                 4
## 67        136     11             2   44          3                 4
## 68        140     13             2   45          3                 3
## 69        144     11             2   38          4                 4
## 70        145      6             1   41          4                 3
## 71        146     10             1   38          3                 4
## 72        147     13             1   43          3                 5
## 73        148      9             2   32          3                 4
## 74        152     12             2   32          4                 2
## 75        155     11             1   44          2                 4
## 76        156      6             2   34          3                 1
## 77        157     13             2   34          3                 3
## 78        159     10             2   38          3                 4
## 79        160      1             2   37          4                 4
## 80        161     11             2   41          3                 2
## 81        163     12             2   48          3                 6
## 82        166     12             1   38          3                 5
## 83        167     14             2   39          2                 2
## 84        168      7             2   46          3                 3
## 85        171     10             1   37          3                 1
## 86        173      9             1   37          3                 3
## 87        175     11             2   40          3                 4
## 88        176      9             1   49          2                 2
## 89        177      9             2   47          3                 6
## 90        180     10             2   46          3                 3
## 91        182      7             2   43          3                 4
## 92        186      9             2   41          4                 2
## 93        187     12             2   40          3                 3
## 94        190      1             1   39          4                 5
## 95        191      8             1   43          2                 5
## 96        192      8             2   40          3                 5
## 97        194      5             2   45          2                 5
## 98        196     13             2   38          2                 7
## 99        197     13             1   43          4                 3
## 100       198      5             1   38          3                 5
## 101       199      5             2   34          4                 2
## 102       204      2             2   43          3                 2
## 103       205     12             1   41          4                 6
## 104       206     10             2   49          4                 4
## 105       207      1             1   52          2                 2
## 106       208      3             1   47          2                 4
## 107       210     12             2   45          4                 1
## 108       212      8             2   50          4                 4
## 109       213     12             2   37          3                 2
## 110       214      2             1   35          4                 1
## 111       219      9             2   40          3                 5
## 112       220     13             2   42          4                 2
## 113       222      2             2   37          2                 1
## 114       225     14             1   38          4                 4
## 115       228     10             1   41          2                 3
## 116       229      6             2   47          3                 5
## 117       232      5             1   43          3                 3
## 118       233      1             1   37          3                 3
## 119       234      9             2   30          4                 2
## 120       237      9             2   45          3                 3
## 121       239     14             2   38          3                 4
## 122       240     13             1   45          4                 2
## 123       241     12             2   42          3                 7
## 124       244     10             1   41          3                 3
## 125       245      9             1   40          3                 4
## 126       246      4             1   40          2                 4
## 127       247      4             2   41          2                 8
## 128       250      5             2   41          4                 5
## 129       253      4             2   50          3                 2
## 130       255      6             2   41          3                 4
## 131       260      8             2   42          3                 2
## 132       261      6             2   43          3                 3
## 133       262      3             1   54          2                 3
## 134       265      5             1   45          3                 5
## 135       266      4             2   43          3                 4
## 136       268      6             2   37          3                 2
## 137       269      7             1   35          4                 1
## 138       272      4             2   43          3                 1
## 139       273      4             2   47          3                 3
## 140       276     14             2   36          4                 1
## 141       277      5             1   41          4                 5
## 142       278      6             2   44          2                 3
## 143       281      5             2   30          4                 2
## 144       282     10             2   44          3                 3
## 145       283      3             2   35          4                 3
## 146       288     14             2   39          2                 1
## 147       289      4             1   39          4                 3
## 148       292      7             1   37          2                 1
## 149       293      2             1   42          4                 5
## 150       297     13             2   42          2                 5
## 151       298      2             2   44          2                 3
## 152       299      9             2   45          3                 7
## 153       300      4             2   51          3                 1
## 154       303      3             2   36          3                 1
## 155       305      7             2   36          3                 3
## 156       306      4             2   39          3                 2
## 157       307      3             1   37          2                 2
## 158       308      3             2   40          3                 4
## 159       309     12             2   39          3                 2
## 160       312     14             1   35          4                 2
## 161       314      1             2   35          3                 5
## 162       315      5             1   46          2                 2
## 163       316      9             1   38          4                 2
## 164       318      5             1   36          4                 4
## 165       321      7             2   32          1                 1
## 166       322     13             1   37          3                 5
## 167       325      8             2   38          4                 4
## 168       326      3             2   37          4                 4
## 169       328      8             2   40          3                 3
## 170       329     12             1   33          4                 3
## 171       330      8             2   42          4                 5
## 172       331      3             2   50          3                 5
## 173       333      4             1   36          4                 2
## 174       336      9             2   37          3                 3
## 175       337     14             1   52          3                 2
## 176       338      1             2   40          4                 2
## 177       340      7             2   38          4                 3
## 178       341      7             2   40          3                 3
## 179       342     10             2   40          3                 4
## 180       343      1             1   41          3                 2
## 181       345      4             2   35          4                 2
## 182       346      7             2   37          2                 3
## 183       347      4             2   36          4                 6
## 184       348     10             1   30          4                 3
## 185       349     12             1   51          3                 4
## 186       351      3             1   26          1                 3
## 187       357      1             1   43          4                 3
## 188       358     13             2   45          3                 2
##     nilai.fashion frekuensi.footwear nilai.footwear frekuensi.lainnya
## 1          0.4964                  1         0.9162                 4
## 2          0.6008                  1         0.0384                 4
## 3          0.6572                  3         0.1014                 1
## 4          0.3894                  2         0.2674                 3
## 5          1.0230                  6         1.1034                 4
## 6          1.3172                  3         0.8162                 2
## 7          2.2226                  1         2.0606                 2
## 8          0.0566                  2         0.6240                 5
## 9          0.3160                  4         0.9674                 4
## 10         1.3258                  2         0.1734                 3
## 11         0.4534                  4         0.5120                 2
## 12         0.3632                  4         0.8950                 2
## 13         0.3916                  5         0.2436                 2
## 14         0.6452                  1         0.8936                 4
## 15         2.6276                  2         2.4224                 3
## 16         1.2012                  1         0.7414                 4
## 17         0.3396                  3         0.3994                 4
## 18         0.7618                  3         0.4500                 3
## 19         0.7848                  3         0.7248                 4
## 20         1.1748                  5         0.8424                 5
## 21         0.8108                  1         0.9608                 1
## 22         0.5366                  3         0.8378                 4
## 23         0.2558                  3         1.5328                 2
## 24         0.4054                  3         0.6040                 1
## 25         0.7342                  3         0.0678                 2
## 26         0.9574                  2         0.3834                 3
## 27         0.8312                  3         0.0918                 2
## 28         1.4842                  3         1.6854                 5
## 29         0.1166                  2         0.8956                 1
## 30         0.8812                  5         0.5268                 4
## 31         0.7514                  2         0.7062                 5
## 32         0.8472                  3         0.3804                 1
## 33         0.9188                  3         0.2412                 4
## 34         0.4186                  1         1.0326                 1
## 35         0.6080                  6         2.1264                 1
## 36         0.7610                  3         0.6270                 3
## 37         0.1620                  3         0.6238                 1
## 38         1.3980                  4         1.0162                 3
## 39         0.4590                  1         0.6240                 2
## 40         0.6330                  5         0.7566                 4
## 41         0.5388                  1         1.5922                 2
## 42         0.1296                  3         0.3474                 1
## 43         0.3400                  8         1.9844                 4
## 44         0.8304                  5         0.4448                 2
## 45         0.4600                  2         0.1770                 1
## 46         1.8744                  3         0.9792                 3
## 47         0.3940                  3         0.5824                 2
## 48         0.2186                  3         1.2810                 3
## 49         0.4082                  3         0.0812                 3
## 50         1.2380                  4         0.3366                 4
## 51         0.6294                  2         0.5926                 2
## 52         1.1454                  5         0.6360                 4
## 53         0.8804                  2         0.4266                 1
## 54         0.6694                  2         0.1146                 5
## 55         0.4748                  3         1.1652                 3
## 56         0.2250                  6         0.2378                 2
## 57         0.2012                  7         0.6118                 4
## 58         0.3678                  2         0.4492                 1
## 59         0.4078                  1         0.3610                 1
## 60         0.5706                  5         0.4228                 2
## 61         1.0592                  3         0.9524                 5
## 62         0.4648                  3         0.4874                 4
## 63         1.8310                  3         0.5380                 1
## 64         0.2178                  2         1.1318                 3
## 65         1.1836                  4         1.0800                 3
## 66         0.6136                  3         2.2986                 3
## 67         0.6268                  3         0.4458                 4
## 68         0.4804                  3         0.8894                 3
## 69         0.7324                  4         1.0140                 4
## 70         1.7468                  1         0.4926                 3
## 71         0.2702                  6         1.2278                 4
## 72         2.1268                  3         0.8562                 5
## 73         0.6314                  3         1.1274                 3
## 74         0.8290                  4         0.4852                 2
## 75         1.5322                  4         0.5442                 4
## 76         0.4980                  3         1.1466                 1
## 77         0.2708                  3         0.3360                 3
## 78         0.3660                  2         0.9248                 3
## 79         0.2474                  3         0.2372                 3
## 80         1.7790                  4         0.6650                 2
## 81         0.7540                  1         0.2812                 4
## 82         0.8986                  2         0.5356                 5
## 83         1.0564                  1         0.7418                 1
## 84         0.5976                  4         1.5304                 2
## 85         1.2262                  3         1.6790                 1
## 86         0.3568                  2         3.0260                 3
## 87         0.9808                  3         0.2978                 4
## 88         0.1730                  2         0.1484                 1
## 89         2.4374                  2         0.5256                 5
## 90         1.0552                  2         0.5750                 2
## 91         0.3686                  2         0.3594                 3
## 92         1.3294                  2         0.7192                 2
## 93         0.6842                  6         1.3918                 3
## 94         1.2764                  1         0.6838                 5
## 95         0.3400                  3         0.3058                 5
## 96         0.9326                  3         0.9474                 5
## 97         0.9354                  5         0.6242                 4
## 98         0.5800                  3         1.9232                 7
## 99         0.5530                  1         0.4384                 3
## 100        1.5734                  6         0.4618                 3
## 101        0.7866                  2         0.4092                 2
## 102        1.2214                  3         2.0072                 2
## 103        2.3964                  4         0.9520                 6
## 104        0.5972                  3         2.1770                 4
## 105        0.3590                  4         0.3326                 2
## 106        0.2492                  3         0.2214                 4
## 107        0.3408                  1         0.3910                 1
## 108        0.2436                  2         0.7172                 4
## 109        0.8956                  4         0.4326                 2
## 110        1.3954                  2         0.2758                 1
## 111        0.5784                  3         0.2652                 5
## 112        0.7702                  4         1.2130                 2
## 113        0.4772                  2         0.6862                 1
## 114        0.5862                  2         0.2344                 4
## 115        0.3496                  5         0.8922                 3
## 116        0.5366                  1         0.8550                 4
## 117        0.2268                  4         2.5416                 3
## 118        0.9056                  2         0.7444                 3
## 119        0.2576                  2         0.0374                 1
## 120        0.2506                  2         0.9792                 3
## 121        1.3828                  3         1.0122                 4
## 122        0.6338                  7         0.1886                 1
## 123        1.3204                  4         0.5272                 6
## 124        0.6636                  5         0.7778                 3
## 125        1.5552                  4         0.5060                 4
## 126        1.1446                  2         2.0434                 3
## 127        0.2612                  3         0.3566                 7
## 128        0.2798                  4         2.0378                 3
## 129        0.1890                  4         2.0212                 2
## 130        0.5016                  4         0.5542                 4
## 131        0.8986                  3         0.3388                 2
## 132        0.5750                  2         0.5156                 3
## 133        0.7252                  1         1.2182                 3
## 134        1.8028                  2         1.0152                 4
## 135        1.2912                  4         1.2526                 4
## 136        1.1040                  4         0.8454                 2
## 137        1.0292                  4         1.1074                 1
## 138        0.6970                  3         0.2258                 1
## 139        0.6342                  4         1.3430                 3
## 140        0.8420                  6         0.0262                 1
## 141        0.7420                  4         2.6252                 4
## 142        0.6414                  1         0.0536                 2
## 143        0.5034                  3         1.7230                 2
## 144        1.1954                  4         0.1446                 3
## 145        0.4268                  3         0.2746                 2
## 146        0.0296                  6         1.4610                 1
## 147        0.2522                  2         0.6368                 3
## 148        1.7900                  6         1.6212                 1
## 149        0.2348                  5         1.0504                 5
## 150        1.0890                  7         0.7922                 5
## 151        0.8942                  6         0.1610                 3
## 152        1.0590                  1         0.7742                 7
## 153        2.1186                  2         0.9552                 1
## 154        0.2456                  3         0.6676                 1
## 155        1.0658                  3         0.6236                 3
## 156        0.5338                  5         0.1426                 2
## 157        0.9668                  6         0.5726                 2
## 158        0.7010                  2         1.7448                 4
## 159        1.3036                  2         0.9226                 2
## 160        1.0974                  4         0.7766                 2
## 161        0.7478                  1         0.4990                 4
## 162        0.4428                  6         1.8990                 1
## 163        1.6220                  1         0.1848                 2
## 164        0.5714                  3         0.9272                 4
## 165        0.4264                  2         1.0042                 1
## 166        0.3324                  4         2.8930                 5
## 167        0.9878                  2         1.8628                 4
## 168        1.0482                  5         0.4762                 3
## 169        0.4860                  1         0.3444                 3
## 170        0.8978                  4         0.7350                 3
## 171        0.7516                  1         0.6336                 5
## 172        1.5280                  3         0.5508                 5
## 173        0.2638                  3         0.4090                 2
## 174        0.6184                  3         0.4206                 3
## 175        0.4828                  5         0.4836                 2
## 176        0.5936                  7         1.5362                 2
## 177        1.2514                  5         0.2010                 1
## 178        0.6860                  5         1.8456                 3
## 179        0.7442                  2         2.1974                 4
## 180        1.9094                  4         0.9774                 2
## 181        0.9236                  4         0.2220                 2
## 182        0.8974                  3         0.7680                 3
## 183        0.7058                  2         1.7068                 6
## 184        0.4888                  3         0.4812                 3
## 185        1.4932                  1         0.9942                 4
## 186        0.3590                  4         0.7784                 2
## 187        0.4186                  4         0.8074                 3
## 188        0.2066                  2         1.1290                 2
##     nilai.lainnya total.nilai.tunai lama.member promo Total_Frekuensi_Pembelian
## 1          0.2822              0.59          35     1                         9
## 2          0.6252              3.05          39     0                        10
## 3          0.6478              1.06          51     0                         6
## 4          0.5866              0.26          19     0                         8
## 5          0.6484              2.85          21     1                        14
## 6          1.2326              0.30           5     0                         9
## 7          0.2528              6.31          37     0                         5
## 8          0.5114              2.33          48     0                        12
## 9          0.1570              1.55           4     1                        12
## 10         0.2402              2.77          17     0                         8
## 11         0.6652              4.01          16     1                         8
## 12         1.0706              1.10          38     1                         8
## 13         0.3592              2.35          31     0                         9
## 14         0.3618              1.08          49     0                         9
## 15         0.5100              3.55          19     1                         8
## 16         0.2348              4.89          24     0                         9
## 17         0.2118              0.70           2     0                        11
## 18         0.3044              3.58          31     0                         9
## 19         0.3012              1.26          14     0                        11
## 20         0.3474              7.22          47     1                        15
## 21         1.3954              2.31          49     0                         4
## 22         2.2904              6.67          20     0                        11
## 23         0.4908              5.89           7     1                         7
## 24         0.4736              1.25          46     0                         5
## 25         0.1278              1.48          26     1                         7
## 26         0.4108              0.86           6     1                         9
## 27         1.0418              0.12          22     0                         7
## 28         0.7000              9.88          23     1                        13
## 29         0.5226              1.03          25     0                         4
## 30         0.2150              5.68          43     0                        14
## 31         2.5626              9.48          34     0                        12
## 32         0.4930              0.33          45     0                         6
## 33         0.8564              6.71          34     0                        12
## 34         0.3490              0.08          27     0                         3
## 35         0.5546              1.96          24     0                         8
## 36         0.8622              2.62          14     1                         9
## 37         0.4818              2.43          15     0                         5
## 38         0.1332              6.59           4     1                        11
## 39         2.4766              4.22          15     0                         5
## 40         0.2178              2.35          12     0                        13
## 41         0.4408              1.79          39     0                         6
## 42         1.0612              2.03           3     1                         5
## 43         2.5246             23.02          35     1                        17
## 44         0.9076              6.17          44     1                        10
## 45         0.9354              1.74           7     1                         4
## 46         0.4656              1.03          15     1                        10
## 47         0.9508              0.50          16     0                         7
## 48         0.1830              2.94          18     1                        10
## 49         0.8782              1.51          38     0                         9
## 50         0.3918              3.65          39     1                        12
## 51         0.9090              1.98          29     0                         6
## 52         0.4534              8.80          44     0                        14
## 53         1.6912              1.30           8     0                         4
## 54         0.6176              6.45          43     0                        12
## 55         1.0652              5.88          30     0                         9
## 56         1.5124              3.12          44     0                        11
## 57         1.6506              8.44           9     0                        16
## 58         0.6074              1.31          31     0                         4
## 59         0.6848              0.48          28     0                         3
## 60         1.2076              0.25          11     0                         9
## 61         1.2964              8.80          40     1                        13
## 62         0.2152              2.36          34     1                        11
## 63         0.2534              3.01          24     0                         8
## 64         0.0688              1.62          33     0                         8
## 65         0.0214              1.82          25     0                        12
## 66         1.3102              7.18          43     0                        10
## 67         1.0148              4.18          26     1                        11
## 68         0.6438              2.41          23     0                         9
## 69         1.1898              5.51          27     1                        12
## 70         1.1866              4.40          19     1                         7
## 71         0.6104              6.11           6     0                        14
## 72         0.2924             13.32          49     1                        13
## 73         0.5010              4.77          42     0                        10
## 74         0.2536              1.00          50     0                         8
## 75         0.1308              2.08          41     0                        12
## 76         0.7042              2.37          26     1                         5
## 77         1.6844              0.67          15     0                         9
## 78         0.3974              0.56          37     0                         9
## 79         0.4072              1.80          13     0                        10
## 80         0.2986              1.29           3     1                         8
## 81         1.3696              4.80          33     0                        11
## 82         0.8170              8.48           6     0                        12
## 83         1.1100              0.11          32     0                         4
## 84         1.2146              9.73           2     1                         9
## 85         1.3128              5.34          25     0                         5
## 86         2.2050              8.43          13     0                         8
## 87         0.9728              7.55          46     0                        11
## 88         0.4654              1.05           1     0                         5
## 89         1.8728             21.26          16     0                        13
## 90         0.1638              1.01          32     0                         7
## 91         0.8560              0.67          13     1                         9
## 92         0.7204              5.12          18     0                         6
## 93         0.5094              0.78          47     0                        12
## 94         0.3480              0.68          21     0                        11
## 95         0.7630              2.79          11     0                        13
## 96         1.1862              8.32           5     0                        13
## 97         0.2732              7.26          28     1                        14
## 98         0.8286             15.53           8     1                        17
## 99         0.2408              2.51          37     1                         7
## 100        0.7502              3.81          49     0                        14
## 101        2.5248              2.44          48     1                         6
## 102        0.7556              3.65          16     1                         7
## 103        1.4082             22.95          13     0                        16
## 104        0.1834              0.31          18     1                        11
## 105        0.7360              1.83          45     0                         8
## 106        0.6562              3.21          23     1                        11
## 107        0.5796              0.47          17     0                         3
## 108        0.3580              2.51           1     1                        10
## 109        1.2678              3.71          42     0                         8
## 110        0.3122              0.12          45     1                         4
## 111        0.8686              2.24          51     0                        13
## 112        0.6332              7.00          40     0                         8
## 113        1.6842              0.17          48     0                         4
## 114        0.9690              2.28          27     0                        10
## 115        1.1760              1.36           2     1                        11
## 116        2.7596              9.92          44     1                        10
## 117        0.4828              3.06          35     1                        10
## 118        0.9008              1.90          30     0                         8
## 119        2.1968              1.65          41     0                         5
## 120        0.2686              0.14          10     0                         8
## 121        0.7388              9.80          46     0                        11
## 122        0.2176              2.09          43     1                        10
## 123        0.4314              2.57          21     0                        17
## 124        0.9384              6.99          34     0                        11
## 125        0.8770              2.58          39     0                        12
## 126        0.5912              6.57          31     1                         9
## 127        0.4206              3.71          36     0                        18
## 128        0.8114              9.42          40     1                        12
## 129        1.0794              1.05          27     1                         8
## 130        0.1032              1.40          14     1                        12
## 131        0.5216              3.81           8     0                         7
## 132        1.1764              1.69          14     1                         8
## 133        0.2212              1.39          15     0                         7
## 134        0.6226              9.53          45     1                        11
## 135        0.3670              4.23          24     0                        12
## 136        1.6854              5.95          18     0                         8
## 137        0.4236              4.83          35     0                         6
## 138        0.8224              1.39           6     0                         5
## 139        0.6236              4.39          39     1                        10
## 140        1.8082              0.48          28     0                         8
## 141        0.8674              2.74          36     1                        13
## 142        0.8636              0.24          20     0                         6
## 143        0.9586              5.91           7     0                         7
## 144        1.5308              6.24          38     0                        10
## 145        0.8832              3.13          31     0                         8
## 146        0.9144              8.69           9     0                         8
## 147        1.1860              0.16          40     0                         8
## 148        2.2588              0.70          35     1                         8
## 149        0.4182              4.60          38     1                        15
## 150        0.2330             10.32          42     1                        17
## 151        0.9728              5.47          25     0                        12
## 152        0.2486              9.78          19     0                        15
## 153        0.9372              4.75           8     1                         4
## 154        1.6538              0.71          20     0                         5
## 155        0.7504              6.42          27     1                         9
## 156        0.5528              2.55          45     0                         9
## 157        1.1496              0.77           2     0                        10
## 158        0.9168              2.46          32     0                        10
## 159        1.1124              2.38           1     0                         6
## 160        0.4480              5.40          41     0                         8
## 161        1.5842              8.92          42     0                        10
## 162        0.7786             11.40          17     1                         9
## 163        0.4350              2.89          47     0                         5
## 164        0.4814              3.61          36     0                        11
## 165        0.4658              1.08          24     1                         4
## 166        0.4912              8.61          23     1                        14
## 167        0.2658              1.19          38     0                        10
## 168        2.5678              4.65          33     0                        12
## 169        0.5506              1.32          50     0                         7
## 170        0.7904              0.46           8     0                        10
## 171        0.3482              5.00           7     0                        11
## 172        0.9748             12.26           8     1                        13
## 173        0.8000              2.49          48     0                         7
## 174        0.0928              1.89          17     0                         9
## 175        1.5290              1.04          46     0                         9
## 176        0.0934              3.71          15     0                        11
## 177        1.4606              5.34          42     1                         9
## 178        0.8286              8.12          43     1                        11
## 179        0.4026              0.45          38     0                        10
## 180        0.7258              7.87           7     0                         8
## 181        0.6366              2.81           9     0                         8
## 182        0.5416              6.60          24     1                         9
## 183        0.3698              0.99          16     0                        14
## 184        0.0484              0.14          22     0                         9
## 185        0.6908              0.18          22     0                         9
## 186        0.2330              1.40          44     0                         9
## 187        1.9580              7.54          43     0                        10
## 188        0.3798              0.07          37     1                         6
##     Total_Rata_Pembelian
## 1                 1.6948
## 2                 1.2644
## 3                 1.4064
## 4                 1.2434
## 5                 2.7748
## 6                 3.3660
## 7                 4.5360
## 8                 1.1920
## 9                 1.4404
## 10                1.7394
## 11                1.6306
## 12                2.3288
## 13                0.9944
## 14                1.9006
## 15                5.5600
## 16                2.1774
## 17                0.9508
## 18                1.5162
## 19                1.8108
## 20                2.3646
## 21                3.1670
## 22                3.6648
## 23                2.2794
## 24                1.4830
## 25                0.9298
## 26                1.7516
## 27                1.9648
## 28                3.8696
## 29                1.5348
## 30                1.6230
## 31                4.0202
## 32                1.7206
## 33                2.0164
## 34                1.8002
## 35                3.2890
## 36                2.2502
## 37                1.2676
## 38                2.5474
## 39                3.5596
## 40                1.6074
## 41                2.5718
## 42                1.5382
## 43                4.8490
## 44                2.1828
## 45                1.5724
## 46                3.3192
##   ... (rows 47-188 of this output omitted for brevity)
lm(total.nilai.tunai~Total_Frekuensi_Pembelian+Total_Rata_Pembelian, data = data5)
## 
## Call:
## lm(formula = total.nilai.tunai ~ Total_Frekuensi_Pembelian + 
##     Total_Rata_Pembelian, data = data5)
## 
## Coefficients:
##               (Intercept)  Total_Frekuensi_Pembelian  
##                   -5.6495                     0.5662  
##      Total_Rata_Pembelian  
##                    1.7833
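Before dropping Total_Rata_Pembelian below, the fit can be inspected a bit more closely. A hedged sketch using broom (already loaded via tidymodels); fit_tunai is a new helper object, not part of the original pipeline.

fit_tunai <- lm(total.nilai.tunai ~ Total_Frekuensi_Pembelian + Total_Rata_Pembelian,
                data = data5)
broom::tidy(fit_tunai)    # per-coefficient estimates and p-values
broom::glance(fit_tunai)  # R-squared, adjusted R-squared, and other fit statistics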
data4n <- data4 %>% select(-no.sampel, -frekuensi.fashion, -frekuensi.footwear, -frekuensi.lainnya, -Total_Rata_Pembelian)

Fix Anomalies

data2n <- data2 %>% select(-frekuensi.fashion,-frekuensi.footwear, -frekuensi.lainnya)
#data3_newvar <- data3_newvar %>% select(-frekuensi.fashion,-frekuensi.footwear, -frekuensi.lainnya, -nilai.fashion, -nilai.footwear,-nilai.lainnya)
data4_fix <- data4
datadiskret <- data3_diskret %>% select(-frekuensi.fashion,-frekuensi.footwear, -frekuensi.lainnya)
datadiskret2 <- data3_diskret2 %>% select(-frekuensi.fashion,-frekuensi.footwear, -frekuensi.lainnya)

Handling Outliers

step_outlier_fix <- function(b){ 
  for (i in 1:ncol(b)){
    x <- b[[i]]   # use [[ ]] so the column is a plain vector (also works on tibbles)
    if (!is.factor(x)){
      # Compute the quartiles
      Q1  <- quantile(x, 0.25)
      Q3  <- quantile(x, 0.75)
      IQR <- Q3 - Q1
      
      # Outlier fences (1.5 * IQR for mild outliers, 3.0 * IQR for extreme values)
      down_outlier_min <- Q1 - (1.5*IQR)
      down_outlier_max <- Q1 - (3.0*IQR)
      up_outlier_min   <- Q3 + (1.5*IQR)
      up_outlier_max   <- Q3 + (3.0*IQR)
      
      # Initialize the counters
      up_outliers           <- 0
      up_extreme_outliers   <- 0
      down_outliers         <- 0
      down_extreme_outliers <- 0
        
      for (j in 1:length(x)){
        if (x[j] < down_outlier_min && x[j] >= down_outlier_max){
          # Mild lower outlier: count it and replace with the column mean
          down_outliers <- down_outliers + 1
          x[j] <- mean(x)
        }
        else if (x[j] < down_outlier_max){
          # Extreme lower outlier: count it and replace with the column mean
          down_extreme_outliers <- down_extreme_outliers + 1
          x[j] <- mean(x)
        }
        else if (x[j] > up_outlier_min && x[j] <= up_outlier_max){
          # Mild upper outlier: count it and replace with the column mean
          up_outliers <- up_outliers + 1
          x[j] <- mean(x)
        }
        else if (x[j] > up_outlier_max){
          # Extreme upper outlier: count it and replace with the column mean
          up_extreme_outliers <- up_extreme_outliers + 1
          x[j] <- mean(x)
        }
        b[j, i] <- x[j] 
      }
      cat("Number of upper outliers x", i, " :", up_outliers, "\n")
      cat("Number of lower outliers x", i, " :", down_outliers, "\n")
      cat("Number of upper extremes x", i, " :", up_extreme_outliers, "\n")
      cat("Number of lower extremes x", i, " :", down_extreme_outliers, "\n \n")
    }
  }
  return(b)
}
step_outlier_fix2 <- function(b){ 
  for (i in 1:ncol(b)){
    x <- b[[i]]   # use [[ ]] so the column is a plain vector (also works on tibbles)
    if (!is.factor(x)){
      # Compute the quartiles
      Q1  <- quantile(x, 0.25)
      Q3  <- quantile(x, 0.75)
      IQR <- Q3 - Q1
      
      # Outlier fences (1.5 * IQR for mild outliers, 3.0 * IQR for extreme values)
      down_outlier_min <- Q1 - (1.5*IQR)
      down_outlier_max <- Q1 - (3.0*IQR)
      up_outlier_min   <- Q3 + (1.5*IQR)
      up_outlier_max   <- Q3 + (3.0*IQR)
      
      # Initialize the counters
      up_outliers           <- 0
      up_extreme_outliers   <- 0
      down_outliers         <- 0
      down_extreme_outliers <- 0
        
      for (j in 1:length(x)){
        if (x[j] < down_outlier_min && x[j] >= down_outlier_max){
          # Mild lower outlier: count it but keep the value
          down_outliers <- down_outliers + 1
          #x[j] <- mean(x)
        }
        else if (x[j] < down_outlier_max){
          # Extreme lower outlier: count it and replace with the column mean
          down_extreme_outliers <- down_extreme_outliers + 1
          x[j] <- mean(x)
        }
        else if (x[j] > up_outlier_min && x[j] <= up_outlier_max){
          # Mild upper outlier: count it but keep the value
          up_outliers <- up_outliers + 1
          #x[j] <- mean(x)
        }
        else if (x[j] > up_outlier_max){
          # Extreme upper outlier: count it and replace with the column mean
          up_extreme_outliers <- up_extreme_outliers + 1
          x[j] <- mean(x)
        }
        b[j, i] <- x[j] 
      }
      cat("Number of upper outliers x", i, " :", up_outliers, "\n")
      cat("Number of lower outliers x", i, " :", down_outliers, "\n")
      cat("Number of upper extremes x", i, " :", up_extreme_outliers, "\n")
      cat("Number of lower extremes x", i, " :", down_extreme_outliers, "\n \n")
    }
  }
  return(b)
}
#data2_outlier <- step_outlier_fix(data2n)
#data2_outlier2 <- step_outlier_fix2(data2n)
#data3_outlier <- step_outlier_fix(data3n) 
#data3_outlier2 <- step_outlier_fix2(data3n) 
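For reference, the fences these functions use are the usual Tukey IQR rules. A minimal self-contained sketch on a toy vector (toy data, not from the study) shows the four thresholds:

toy <- c(1, 2, 2, 3, 3, 4, 50)                   # toy vector with one extreme value
Q1  <- unname(quantile(toy, 0.25))
Q3  <- unname(quantile(toy, 0.75))
iqr <- Q3 - Q1
c(down_outlier_min = Q1 - 1.5 * iqr, up_outlier_min = Q3 + 1.5 * iqr,
  down_outlier_max = Q1 - 3.0 * iqr, up_outlier_max = Q3 + 3.0 * iqr)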

Raw Data Splitting

set.seed(123)
basic_split <- initial_split(data = data[-1],
                             prop = 0.8,
                             strata = "promo")
tidy(basic_split) %>% count(Data)
## # A tibble: 2 × 2
##   Data           n
##   <chr>      <int>
## 1 Analysis     286
## 2 Assessment    72
data_training <- training(basic_split)
data_testing <- testing(basic_split)
dim(data_training);dim(data_testing)
## [1] 286  13
## [1] 72 13
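As a quick sanity check (a sketch, not part of the original pipeline), the stratified split should leave the promo proportions roughly equal in both sets:

data_training %>% count(promo) %>% mutate(prop = n / sum(n))
data_testing  %>% count(promo) %>% mutate(prop = n / sum(n))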

Data Splitting FENOM

set.seed(123)
basic_split1 <- initial_split(data = data2n,
                             prop = 0.8,
                             strata = "promo")
tidy(basic_split1) %>% count(Data)
## # A tibble: 2 × 2
##   Data           n
##   <chr>      <int>
## 1 Analysis     259
## 2 Assessment    66
data_training_fenom <- training(basic_split1)
data_testing_fenom <- testing(basic_split1)
data_training_fenom <- data_training_fenom %>% select(-no.sampel)
data_testing_fenom <- data_testing_fenom %>% select(-no.sampel)
dim(data_training_fenom);dim(data_testing_fenom)
## [1] 259  11
## [1] 66 11

Data Splitting: One-Hot Encoding + FE + Anomaly Data + Discretization of Age and Membership Duration

Preprocess first

pra_proses1 <- recipe(promo~., data = datadiskret2[-1]) %>%
  step_dummy(all_factor_predictors()) %>% 
  step_normalize(all_numeric_predictors(), means= 0, sds = 1)

pra_proses1f <- pra_proses1 %>%
  prep() %>%
  bake(new_data = NULL)
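Optionally (a sketch), the trained steps and the resulting dummy columns can be checked before splitting:

pra_proses1 %>% prep() %>% tidy()   # which steps were trained and on which columns
names(pra_proses1f)                 # the one-hot-encoded column names after baking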
set.seed(121)
basic_split2 <- initial_split(data = pra_proses1f,
                             prop = 0.8,
                             strata = "promo")
tidy(basic_split2) %>% count(Data)
## # A tibble: 2 × 2
##   Data           n
##   <chr>      <int>
## 1 Analysis     259
## 2 Assessment    66
data_training_diskret2 <- training(basic_split2)
data_testing_diskret2 <- testing(basic_split2)
#data_training_diskret2 <- data_training_diskret2 %>% select(-no.sampel)
#data_testing_diskret2 <- data_testing_diskret2 %>% select(-no.sampel)
dim(data_training_diskret2);dim(data_testing_diskret2)
## [1] 259  29
## [1] 66 29

Data Splitting: One-Hot Encoding + FE + Anomaly Data + Discretization of Age

Preprocess first

pra_proses2 <- recipe(promo~., data = datadiskret[-1]) %>%
  step_dummy(all_factor_predictors()) %>% 
  step_normalize(all_numeric_predictors(), means= 0, sds = 1)

pra_proses2f <- pra_proses2 %>%
  prep() %>%
  bake(new_data = NULL)
set.seed(123)
basic_split3 <- initial_split(data = pra_proses2f,
                             prop = 0.8,
                             strata = "promo")
tidy(basic_split3) %>% count(Data)
## # A tibble: 2 × 2
##   Data           n
##   <chr>      <int>
## 1 Analysis     259
## 2 Assessment    66
data_training_diskret <- training(basic_split3)
data_testing_diskret <- testing(basic_split3)
dim(data_training_diskret);dim(data_testing_diskret)
## [1] 259  26
## [1] 66 26

Data Splitting: Standardization of All Variables

Preprocess first

data_standard <- recipe(promo~.,data = data) %>%
  #step_corr(all_numeric_predictors(), threshold = .8) %>%
  step_normalize(all_numeric_predictors(),means = 0, sds = 1)
data_standardfix <- data_standard %>%
  prep() %>% 
  bake(new_data = NULL)
set.seed(123)
basic_split4 <- initial_split(data = data_standardfix,
                             prop = 0.8,
                             strata = "promo")
tidy(basic_split4) %>% count(Data)
## # A tibble: 2 × 2
##   Data           n
##   <chr>      <int>
## 1 Analysis     286
## 2 Assessment    72
data_training_anom <- training(basic_split4)
data_testing_anom <- testing(basic_split4)
data_training_anom <- data_training_anom[-1]
data_testing_anom <- data_testing_anom[-1]
dim(data_training_anom);dim(data_testing_anom)
## [1] 286  13
## [1] 72 13

Data Splitting: FENOM + Standardization

Preprocess first

data_standardfenom <- recipe(promo~., data = data2n[-1]) %>%
  step_normalize(all_numeric_predictors(), means= 0, sds = 1)

data_standardfenomfix <- data_standardfenom %>%
  prep() %>%
  bake(new_data = NULL)
set.seed(123)
basic_split4 <- initial_split(data = data_standardfenomfix,
                             prop = 0.8,
                             strata = "promo")
tidy(basic_split4) %>% count(Data)
## # A tibble: 2 × 2
##   Data           n
##   <chr>      <int>
## 1 Analysis     259
## 2 Assessment    66
data_training_sfenom <- training(basic_split4)
data_testing_sfenom <- testing(basic_split4)
dim(data_training_sfenom);dim(data_testing_sfenom)
## [1] 259  11
## [1] 66 11

Data Splitting: FENOM + Regression

Preprocess first

data_fenomregresi <- recipe(promo~., data = data4n) %>%
  step_normalize(all_numeric_predictors(), means= 0, sds = 1)

data_fenomregresifix <- data_fenomregresi %>%
  prep() %>%
  bake(new_data = NULL)
set.seed(123)
basic_split4 <- initial_split(data = data_fenomregresifix,
                             prop = 0.8,
                             strata = "promo")
tidy(basic_split4) %>% count(Data)
## # A tibble: 2 × 2
##   Data           n
##   <chr>      <int>
## 1 Analysis     259
## 2 Assessment    66
data_training_fenomregresi <- training(basic_split4)
data_testing_fenomregresi <- testing(basic_split4)
dim(data_training_fenomregresi);dim(data_testing_fenomregresi)
## [1] 259  11
## [1] 66 11
a <- data_training_sfenom %>% 
count(promo) %>% 
mutate(percent=n*100/sum(n),label=str_c(round(percent,2),"%")) %>% 
ggplot(aes(x="",y=n,fill=promo))+
  geom_col()+
  geom_text(aes(label = label),
            position = position_stack(vjust = 0.5)) +
  coord_polar(theta = "y")+
  theme_cowplot()
ggsave("Imbalance.png", a)
## Saving 7 x 5 in image

Underbalancing Data

data_training_under <- ovun.sample(promo ~ ., data = data_training_sfenom, method = "under",N = 174)$data
table(data_training_under$promo)
## 
##  0  1 
## 87 87
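As an alternative to hard-coding N = 174, the target size can be derived from the minority class. A hedged sketch; it assumes the ROSE package providing ovun.sample is already loaded (as implied by the calls above) and uses the new name data_training_under2:

n_min <- min(table(data_training_sfenom$promo))      # minority-class size (87 here)
data_training_under2 <- ovun.sample(promo ~ ., data = data_training_sfenom,
                                    method = "under", N = 2 * n_min, seed = 123)$data
table(data_training_under2$promo)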
a <- data_training_under %>% 
  count(promo) %>% 
  mutate(percent = n * 100 / sum(n),
         label = str_c("", n, ": ", round(percent, 2), "%")) %>%  # add the observation count to the label
  ggplot(aes(x = "", y = n, fill = factor(promo))) +  # make sure promo is treated as a factor
  geom_col() +
  geom_text(aes(label = label), position = position_stack(vjust = 0.5)) +
  coord_polar(theta = "y") +
  theme_cowplot()

ggsave("Imbalance.png", a, width = 10, height = 8)  # Spesifikasikan ukuran sesuai kebutuhan

Overbalancing Data

data_training_ovr <- ovun.sample(promo ~ ., data = data_training_sfenom, method = "over",N = 344)$data
table(data_training_ovr$promo)
## 
##   0   1 
## 172 172

Hybridbalancing Data

data_training_hyb <- ovun.sample(promo ~ ., data = data_training_sfenom, method = "over",N = 304)$data
table(data_training_hyb$promo)
## 
##   0   1 
## 172 132
data_training_hyb_final <- ovun.sample(promo ~ ., data = data_training_hyb, method = "under",N = 264)$data
table(data_training_hyb_final$promo)
## 
##   0   1 
## 132 132
a <- data_training_hyb_final %>% 
  count(promo) %>% 
  mutate(percent = n * 100 / sum(n),
         label = str_c("", n, ": ", round(percent, 2), "%")) %>%  # add the observation count to the label
  ggplot(aes(x = "", y = n, fill = factor(promo))) +  # make sure promo is treated as a factor
  geom_col() +
  geom_text(aes(label = label), position = position_stack(vjust = 0.5)) +
  coord_polar(theta = "y") +
  theme_cowplot()

ggsave("Hybridsampling.png", a, width = 10, height = 8)  # Spesifikasikan ukuran sesuai kebutuhan
data_training_under <- ovun.sample(promo ~ ., data = data_training_sfenom, method = "under",N = 174)$data
table(data_training_under$promo)
## 
##  0  1 
## 87 87
a <- data_training_ovr %>% 
  count(promo) %>% 
  mutate(percent = n * 100 / sum(n),
         label = str_c("", n, ": ", round(percent, 2), "%")) %>%  # add the observation count to the label
  ggplot(aes(x = "", y = n, fill = factor(promo))) +  # make sure promo is treated as a factor
  geom_col() +
  geom_text(aes(label = label), position = position_stack(vjust = 0.5)) +
  coord_polar(theta = "y") +
  theme_cowplot()

ggsave("Oversampling.png", a, width = 10, height = 8)  # Spesifikasikan ukuran sesuai kebutuhan

Preprocessing Test

pra_proses1 <- recipe(promo~.,data = data_training_under) %>%
  #step_corr(all_numeric_predictors(), threshold = .8) %>%
  step_discretize(all_numeric_predictors(), min_unique = 4)
  #step_discretize(lama.member, min_unique = 5)  
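This recipe is only defined, not applied. A short sketch to prep and bake it and inspect the resulting bins (assuming the standardized predictors have enough distinct values to discretize):

pra_proses1 %>% prep() %>% bake(new_data = NULL) %>% glimpse()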

SVM Modeling 1

SVM Model

library(e1071)
## Warning: package 'e1071' was built under R version 4.3.3
## 
## Attaching package: 'e1071'
## The following object is masked from 'package:mlr3verse':
## 
##     tune
## The following object is masked from 'package:tune':
## 
##     tune
## The following object is masked from 'package:rsample':
## 
##     permutations
## The following object is masked from 'package:parsnip':
## 
##     tune

SVM Model: Training Data

set.seed(123)
tuningsvm <- tune(svm,promo~.,data= data_training,
                  ranges=list(kernel=c("radial"),cost = c(1,2,5,10,13,15), 
                              gamma = c(0.01,0.1,1,2,5,10)))
trainonly <- tuningsvm$best.model
trainonly
## 
## Call:
## best.tune(METHOD = svm, train.x = promo ~ ., data = data_training, 
##     ranges = list(kernel = c("radial"), cost = c(1, 2, 5, 10, 13, 
##         15), gamma = c(0.01, 0.1, 1, 2, 5, 10)))
## 
## 
## Parameters:
##    SVM-Type:  C-classification 
##  SVM-Kernel:  radial 
##        cost:  10 
## 
## Number of Support Vectors:  190
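The full cross-validation grid behind this choice can be inspected from the e1071 tuning object, the same way summary() is used for the balanced models below (a short sketch):

tuningsvm$best.parameters      # winning kernel, cost, and gamma combination
head(tuningsvm$performances)   # cross-validated error for each grid point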

SVM Model: Under-Balancing + Standardized FENOM

set.seed(123)
tuningsvm_under <- tune(svm,promo~.,data= data_training_under,
                  ranges=list(kernel=c("polynomial","sigmoid", "linear"),cost = c(1,2,3,4,5,8,10), gamma = c(0.01,0.1,0.5,0.9)))
trainonly_under <- tuningsvm_under$best.model
trainonly_under
## 
## Call:
## best.tune(METHOD = svm, train.x = promo ~ ., data = data_training_under, 
##     ranges = list(kernel = c("polynomial", "sigmoid", "linear"), 
##         cost = c(1, 2, 3, 4, 5, 8, 10), gamma = c(0.01, 0.1, 0.5, 
##             0.9)))
## 
## 
## Parameters:
##    SVM-Type:  C-classification 
##  SVM-Kernel:  linear 
##        cost:  5 
## 
## Number of Support Vectors:  93
summary(tuningsvm_under)
## 
## Parameter tuning of 'svm':
## 
## - sampling method: 10-fold cross validation 
## 
## - best parameters:
##  kernel cost gamma
##  linear    5  0.01
## 
## - best performance: 0.2571895 
## 
## - Detailed performance results:
##        kernel cost gamma     error dispersion
## 1  polynomial    1  0.01 0.5800654 0.05518588
## 2     sigmoid    1  0.01 0.4460784 0.11656537
## 3      linear    1  0.01 0.2862745 0.13342515
## 4  polynomial    2  0.01 0.5800654 0.05518588
## 5     sigmoid    2  0.01 0.4287582 0.12735132
## 6      linear    2  0.01 0.2859477 0.11338661
## 7  polynomial    3  0.01 0.5800654 0.05518588
## 8     sigmoid    3  0.01 0.4062092 0.11221683
## 9      linear    3  0.01 0.2741830 0.11175483
## 10 polynomial    4  0.01 0.5800654 0.05518588
## 11    sigmoid    4  0.01 0.3888889 0.11989501
## 12     linear    4  0.01 0.2624183 0.10218829
## 13 polynomial    5  0.01 0.5800654 0.05518588
## 14    sigmoid    5  0.01 0.3885621 0.12445456
## 15     linear    5  0.01 0.2571895 0.11084433
## 16 polynomial    8  0.01 0.5800654 0.05518588
## 17    sigmoid    8  0.01 0.3375817 0.11202633
## 18     linear    8  0.01 0.2735294 0.12814320
## 19 polynomial   10  0.01 0.5800654 0.05518588
## 20    sigmoid   10  0.01 0.3369281 0.13042314
## 21     linear   10  0.01 0.2735294 0.12814320
## 22 polynomial    1  0.10 0.4290850 0.15755001
## 23    sigmoid    1  0.10 0.3437908 0.11606088
## 24     linear    1  0.10 0.2862745 0.13342515
## 25 polynomial    2  0.10 0.4405229 0.16865053
## 26    sigmoid    2  0.10 0.3333333 0.07513971
## 27     linear    2  0.10 0.2859477 0.11338661
## 28 polynomial    3  0.10 0.3892157 0.15831938
## 29    sigmoid    3  0.10 0.3506536 0.09571253
## 30     linear    3  0.10 0.2741830 0.11175483
## 31 polynomial    4  0.10 0.3950980 0.14523153
## 32    sigmoid    4  0.10 0.3627451 0.11520091
## 33     linear    4  0.10 0.2624183 0.10218829
## 34 polynomial    5  0.10 0.3552288 0.12348397
## 35    sigmoid    5  0.10 0.3673203 0.11805183
## 36     linear    5  0.10 0.2571895 0.11084433
## 37 polynomial    8  0.10 0.3493464 0.12188223
## 38    sigmoid    8  0.10 0.3735294 0.09915906
## 39     linear    8  0.10 0.2735294 0.12814320
## 40 polynomial   10  0.10 0.3379085 0.12207437
## 41    sigmoid   10  0.10 0.3562092 0.09791810
## 42     linear   10  0.10 0.2735294 0.12814320
## 43 polynomial    1  0.50 0.3496732 0.12091994
## 44    sigmoid    1  0.50 0.4137255 0.10824620
## 45     linear    1  0.50 0.2862745 0.13342515
## 46 polynomial    2  0.50 0.3496732 0.12091994
## 47    sigmoid    2  0.50 0.3738562 0.12982812
## 48     linear    2  0.50 0.2859477 0.11338661
## 49 polynomial    3  0.50 0.3496732 0.12091994
## 50    sigmoid    3  0.50 0.4084967 0.12096900
## 51     linear    3  0.50 0.2741830 0.11175483
## 52 polynomial    4  0.50 0.3496732 0.12091994
## 53    sigmoid    4  0.50 0.4366013 0.15289555
## 54     linear    4  0.50 0.2624183 0.10218829
## 55 polynomial    5  0.50 0.3496732 0.12091994
## 56    sigmoid    5  0.50 0.4254902 0.07434271
## 57     linear    5  0.50 0.2571895 0.11084433
## 58 polynomial    8  0.50 0.3496732 0.12091994
## 59    sigmoid    8  0.50 0.4026144 0.08610214
## 60     linear    8  0.50 0.2735294 0.12814320
## 61 polynomial   10  0.50 0.3496732 0.12091994
## 62    sigmoid   10  0.50 0.3849673 0.07453401
## 63     linear   10  0.50 0.2735294 0.12814320
## 64 polynomial    1  0.90 0.3496732 0.12091994
## 65    sigmoid    1  0.90 0.4254902 0.06897756
## 66     linear    1  0.90 0.2862745 0.13342515
## 67 polynomial    2  0.90 0.3496732 0.12091994
## 68    sigmoid    2  0.90 0.4362745 0.11695157
## 69     linear    2  0.90 0.2859477 0.11338661
## 70 polynomial    3  0.90 0.3496732 0.12091994
## 71    sigmoid    3  0.90 0.4316993 0.13998097
## 72     linear    3  0.90 0.2741830 0.11175483
## 73 polynomial    4  0.90 0.3496732 0.12091994
## 74    sigmoid    4  0.90 0.4532680 0.09168529
## 75     linear    4  0.90 0.2624183 0.10218829
## 76 polynomial    5  0.90 0.3496732 0.12091994
## 77    sigmoid    5  0.90 0.4186275 0.07080526
## 78     linear    5  0.90 0.2571895 0.11084433
## 79 polynomial    8  0.90 0.3496732 0.12091994
## 80    sigmoid    8  0.90 0.4245098 0.07770863
## 81     linear    8  0.90 0.2735294 0.12814320
## 82 polynomial   10  0.90 0.3496732 0.12091994
## 83    sigmoid   10  0.90 0.4362745 0.08817812
## 84     linear   10  0.90 0.2735294 0.12814320

SVM Model: Over-Balancing

set.seed(123)
tuningsvm_over <- tune(svm,promo~.,data= data_training_ovr,
                  ranges=list(kernel=c("polynomial","sigmoid", "linear"),cost = c(1,2,3,4,5,8,10), gamma = c(0.01,0.1,0.5,0.9)))
trainonly_over <- tuningsvm_over$best.model
trainonly_over
## 
## Call:
## best.tune(METHOD = svm, train.x = promo ~ ., data = data_training_ovr, 
##     ranges = list(kernel = c("polynomial", "sigmoid", "linear"), 
##         cost = c(1, 2, 3, 4, 5, 8, 10), gamma = c(0.01, 0.1, 0.5, 
##             0.9)))
## 
## 
## Parameters:
##    SVM-Type:  C-classification 
##  SVM-Kernel:  polynomial 
##        cost:  10 
##      degree:  3 
##      coef.0:  0 
## 
## Number of Support Vectors:  216
summary(tuningsvm_over)
## 
## Parameter tuning of 'svm':
## 
## - sampling method: 10-fold cross validation 
## 
## - best parameters:
##      kernel cost gamma
##  polynomial   10   0.1
## 
## - best performance: 0.1654622 
## 
## - Detailed performance results:
##        kernel cost gamma     error dispersion
## 1  polynomial    1  0.01 0.5467227 0.03584821
## 2     sigmoid    1  0.01 0.3513445 0.05388128
## 3      linear    1  0.01 0.2902521 0.07615875
## 4  polynomial    2  0.01 0.5467227 0.03584821
## 5     sigmoid    2  0.01 0.3221008 0.06124283
## 6      linear    2  0.01 0.2728571 0.07279147
## 7  polynomial    3  0.01 0.5467227 0.03584821
## 8     sigmoid    3  0.01 0.3220168 0.07167673
## 9      linear    3  0.01 0.2757143 0.08106759
## 10 polynomial    4  0.01 0.5467227 0.03584821
## 11    sigmoid    4  0.01 0.3190756 0.07482406
## 12     linear    4  0.01 0.2786555 0.07996421
## 13 polynomial    5  0.01 0.5467227 0.03584821
## 14    sigmoid    5  0.01 0.3163025 0.07718518
## 15     linear    5  0.01 0.2815966 0.08230476
## 16 polynomial    8  0.01 0.5467227 0.03584821
## 17    sigmoid    8  0.01 0.2872269 0.08210275
## 18     linear    8  0.01 0.2932773 0.06674312
## 19 polynomial   10  0.01 0.5467227 0.03584821
## 20    sigmoid   10  0.01 0.2929412 0.09824884
## 21     linear   10  0.01 0.2932773 0.06674312
## 22 polynomial    1  0.10 0.2237815 0.10201237
## 23    sigmoid    1  0.10 0.3163866 0.08051869
## 24     linear    1  0.10 0.2902521 0.07615875
## 25 polynomial    2  0.10 0.1801681 0.07511517
## 26    sigmoid    2  0.10 0.3602521 0.07137423
## 27     linear    2  0.10 0.2728571 0.07279147
## 28 polynomial    3  0.10 0.1830252 0.07276532
## 29    sigmoid    3  0.10 0.3865546 0.08515130
## 30     linear    3  0.10 0.2757143 0.08106759
## 31 polynomial    4  0.10 0.1832773 0.05846959
## 32    sigmoid    4  0.10 0.3950420 0.08382462
## 33     linear    4  0.10 0.2786555 0.07996421
## 34 polynomial    5  0.10 0.1803361 0.06117124
## 35    sigmoid    5  0.10 0.3690756 0.09192695
## 36     linear    5  0.10 0.2815966 0.08230476
## 37 polynomial    8  0.10 0.1714286 0.07135329
## 38    sigmoid    8  0.10 0.4036134 0.08760304
## 39     linear    8  0.10 0.2932773 0.06674312
## 40 polynomial   10  0.10 0.1654622 0.07637300
## 41    sigmoid   10  0.10 0.3837815 0.08278346
## 42     linear   10  0.10 0.2932773 0.06674312
## 43 polynomial    1  0.50 0.1800000 0.06475594
## 44    sigmoid    1  0.50 0.4505882 0.05867502
## 45     linear    1  0.50 0.2902521 0.07615875
## 46 polynomial    2  0.50 0.1800000 0.06475594
## 47    sigmoid    2  0.50 0.4213445 0.08662684
## 48     linear    2  0.50 0.2728571 0.07279147
## 49 polynomial    3  0.50 0.1800000 0.06475594
## 50    sigmoid    3  0.50 0.4503361 0.07143621
## 51     linear    3  0.50 0.2757143 0.08106759
## 52 polynomial    4  0.50 0.1800000 0.06475594
## 53    sigmoid    4  0.50 0.4651261 0.05668827
## 54     linear    4  0.50 0.2786555 0.07996421
## 55 polynomial    5  0.50 0.1800000 0.06475594
## 56    sigmoid    5  0.50 0.4157983 0.06306541
## 57     linear    5  0.50 0.2815966 0.08230476
## 58 polynomial    8  0.50 0.1800000 0.06475594
## 59    sigmoid    8  0.50 0.4501681 0.08719277
## 60     linear    8  0.50 0.2932773 0.06674312
## 61 polynomial   10  0.50 0.1800000 0.06475594
## 62    sigmoid   10  0.50 0.4646218 0.07510498
## 63     linear   10  0.50 0.2932773 0.06674312
## 64 polynomial    1  0.90 0.1800000 0.06475594
## 65    sigmoid    1  0.90 0.4243697 0.06404957
## 66     linear    1  0.90 0.2902521 0.07615875
## 67 polynomial    2  0.90 0.1800000 0.06475594
## 68    sigmoid    2  0.90 0.4851261 0.06449278
## 69     linear    2  0.90 0.2728571 0.07279147
## 70 polynomial    3  0.90 0.1800000 0.06475594
## 71    sigmoid    3  0.90 0.4682353 0.06635972
## 72     linear    3  0.90 0.2757143 0.08106759
## 73 polynomial    4  0.90 0.1800000 0.06475594
## 74    sigmoid    4  0.90 0.4505042 0.05598521
## 75     linear    4  0.90 0.2786555 0.07996421
## 76 polynomial    5  0.90 0.1800000 0.06475594
## 77    sigmoid    5  0.90 0.4327731 0.08097612
## 78     linear    5  0.90 0.2815966 0.08230476
## 79 polynomial    8  0.90 0.1800000 0.06475594
## 80    sigmoid    8  0.90 0.4470588 0.06938868
## 81     linear    8  0.90 0.2932773 0.06674312
## 82 polynomial   10  0.90 0.1800000 0.06475594
## 83    sigmoid   10  0.90 0.4266387 0.08073454
## 84     linear   10  0.90 0.2932773 0.06674312

SVM Model: Hybrid Balancing

set.seed(123)
tuningsvm_1 <- tune(svm,promo~.,data= data_training_hyb_final,
                  ranges=list(kernel=c("polynomial","sigmoid", "linear"),cost = c(1,2,3,4,5,8,10), gamma = c(0.01,0.1,0.5,0.9)))
trainonly_1 <- tuningsvm_1$best.model
trainonly_1
## 
## Call:
## best.tune(METHOD = svm, train.x = promo ~ ., data = data_training_hyb_final, 
##     ranges = list(kernel = c("polynomial", "sigmoid", "linear"), 
##         cost = c(1, 2, 3, 4, 5, 8, 10), gamma = c(0.01, 0.1, 0.5, 
##             0.9)))
## 
## 
## Parameters:
##    SVM-Type:  C-classification 
##  SVM-Kernel:  polynomial 
##        cost:  4 
##      degree:  3 
##      coef.0:  0 
## 
## Number of Support Vectors:  198
summary(tuningsvm_1)
## 
## Parameter tuning of 'svm':
## 
## - sampling method: 10-fold cross validation 
## 
## - best parameters:
##      kernel cost gamma
##  polynomial    4   0.1
## 
## - best performance: 0.1666667 
## 
## - Detailed performance results:
##        kernel cost gamma     error dispersion
## 1  polynomial    1  0.01 0.5522792 0.06677133
## 2     sigmoid    1  0.01 0.3111111 0.08004070
## 3      linear    1  0.01 0.2158120 0.07327610
## 4  polynomial    2  0.01 0.5522792 0.06677133
## 5     sigmoid    2  0.01 0.3113960 0.09048593
## 6      linear    2  0.01 0.2233618 0.06504981
## 7  polynomial    3  0.01 0.5522792 0.06677133
## 8     sigmoid    3  0.01 0.3112536 0.09523289
## 9      linear    3  0.01 0.2195157 0.06569699
## 10 polynomial    4  0.01 0.5522792 0.06677133
## 11    sigmoid    4  0.01 0.3109687 0.07953445
## 12     linear    4  0.01 0.2196581 0.07513425
## 13 polynomial    5  0.01 0.5522792 0.06677133
## 14    sigmoid    5  0.01 0.3074074 0.09456658
## 15     linear    5  0.01 0.2196581 0.07513425
## 16 polynomial    8  0.01 0.5484330 0.06377285
## 17    sigmoid    8  0.01 0.2730769 0.09570240
## 18     linear    8  0.01 0.2233618 0.06992095
## 19 polynomial   10  0.01 0.5484330 0.06377285
## 20    sigmoid   10  0.01 0.2655271 0.09619856
## 21     linear   10  0.01 0.2196581 0.07513425
## 22 polynomial    1  0.10 0.2353276 0.06342827
## 23    sigmoid    1  0.10 0.3074074 0.09785909
## 24     linear    1  0.10 0.2158120 0.07327610
## 25 polynomial    2  0.10 0.2086895 0.04873586
## 26    sigmoid    2  0.10 0.2994302 0.08623379
## 27     linear    2  0.10 0.2233618 0.06504981
## 28 polynomial    3  0.10 0.1856125 0.05411460
## 29    sigmoid    3  0.10 0.3294872 0.07959962
## 30     linear    3  0.10 0.2195157 0.06569699
## 31 polynomial    4  0.10 0.1666667 0.05390566
## 32    sigmoid    4  0.10 0.3186610 0.05978401
## 33     linear    4  0.10 0.2196581 0.07513425
## 34 polynomial    5  0.10 0.1742165 0.04782899
## 35    sigmoid    5  0.10 0.3450142 0.08280317
## 36     linear    5  0.10 0.2196581 0.07513425
## 37 polynomial    8  0.10 0.1856125 0.06983464
## 38    sigmoid    8  0.10 0.3297721 0.07952651
## 39     linear    8  0.10 0.2233618 0.06992095
## 40 polynomial   10  0.10 0.1854701 0.07577967
## 41    sigmoid   10  0.10 0.3028490 0.07861103
## 42     linear   10  0.10 0.2196581 0.07513425
## 43 polynomial    1  0.50 0.2119658 0.07530211
## 44    sigmoid    1  0.50 0.3819088 0.09899220
## 45     linear    1  0.50 0.2158120 0.07327610
## 46 polynomial    2  0.50 0.2119658 0.07530211
## 47    sigmoid    2  0.50 0.4085470 0.07806880
## 48     linear    2  0.50 0.2233618 0.06504981
## 49 polynomial    3  0.50 0.2119658 0.07530211
## 50    sigmoid    3  0.50 0.3562678 0.08924463
## 51     linear    3  0.50 0.2195157 0.06569699
## 52 polynomial    4  0.50 0.2119658 0.07530211
## 53    sigmoid    4  0.50 0.3633903 0.05219048
## 54     linear    4  0.50 0.2196581 0.07513425
## 55 polynomial    5  0.50 0.2119658 0.07530211
## 56    sigmoid    5  0.50 0.3517094 0.07515721
## 57     linear    5  0.50 0.2196581 0.07513425
## 58 polynomial    8  0.50 0.2119658 0.07530211
## 59    sigmoid    8  0.50 0.3633903 0.07506415
## 60     linear    8  0.50 0.2233618 0.06992095
## 61 polynomial   10  0.50 0.2119658 0.07530211
## 62    sigmoid   10  0.50 0.3478632 0.08264509
## 63     linear   10  0.50 0.2196581 0.07513425
## 64 polynomial    1  0.90 0.2119658 0.07530211
## 65    sigmoid    1  0.90 0.4018519 0.09131994
## 66     linear    1  0.90 0.2158120 0.07327610
## 67 polynomial    2  0.90 0.2119658 0.07530211
## 68    sigmoid    2  0.90 0.4044160 0.09989458
## 69     linear    2  0.90 0.2233618 0.06504981
## 70 polynomial    3  0.90 0.2119658 0.07530211
## 71    sigmoid    3  0.90 0.3974359 0.07064340
## 72     linear    3  0.90 0.2195157 0.06569699
## 73 polynomial    4  0.90 0.2119658 0.07530211
## 74    sigmoid    4  0.90 0.4088319 0.07137686
## 75     linear    4  0.90 0.2196581 0.07513425
## 76 polynomial    5  0.90 0.2119658 0.07530211
## 77    sigmoid    5  0.90 0.3977208 0.08450655
## 78     linear    5  0.90 0.2196581 0.07513425
## 79 polynomial    8  0.90 0.2119658 0.07530211
## 80    sigmoid    8  0.90 0.4009972 0.09794073
## 81     linear    8  0.90 0.2233618 0.06992095
## 82 polynomial   10  0.90 0.2119658 0.07530211
## 83    sigmoid   10  0.90 0.4048433 0.08706386
## 84     linear   10  0.90 0.2196581 0.07513425

SVM Model: Discretization 1 + Standardization + FENOM

set.seed(123)
tuningsvm_2 <- tune(svm, promo~., data= data_training_diskret,
                  ranges=list(kernel=c("polynomial"),
                              cost = c(1,2,5,10,13),
                              degree = c(1,2,5,10)))
trainonly_2 <- tuningsvm_2$best.model
trainonly_2
## 
## Call:
## best.tune(METHOD = svm, train.x = promo ~ ., data = data_training_diskret, 
##     ranges = list(kernel = c("polynomial"), cost = c(1, 2, 5, 10, 
##         13), degree = c(1, 2, 5, 10)))
## 
## 
## Parameters:
##    SVM-Type:  C-classification 
##  SVM-Kernel:  polynomial 
##        cost:  2 
##      degree:  2 
##      coef.0:  0 
## 
## Number of Support Vectors:  167

SVM Model: FE + Anomaly Data

set.seed(123)
tuningsvm_3 <- tune(svm,promo~.,data = data_training_fenom,
                  ranges=list(kernel=c("radial"),cost = c(1,2,5,10,13), 
                              gamma = c(0.01,0.1,1,2,5,10)))
trainonly_3 <- tuningsvm_3$best.model
trainonly_3
## 
## Call:
## best.tune(METHOD = svm, train.x = promo ~ ., data = data_training_fenom, 
##     ranges = list(kernel = c("radial"), cost = c(1, 2, 5, 10, 13), 
##         gamma = c(0.01, 0.1, 1, 2, 5, 10)))
## 
## 
## Parameters:
##    SVM-Type:  C-classification 
##  SVM-Kernel:  radial 
##        cost:  10 
## 
## Number of Support Vectors:  174

SVM Model: Standardization of All Variables

set.seed(123)
tuningsvm_5 <- tune(svm,promo~.,data= data_training_anom,
                  ranges=list(kernel=c("radial"),
                              cost = c(1,2,5,10,13), 
                              gamma = c(0.01,0.1,1,2,5,10)))
trainonly_5 <- tuningsvm_5$best.model
trainonly_5
## 
## Call:
## best.tune(METHOD = svm, train.x = promo ~ ., data = data_training_anom, 
##     ranges = list(kernel = c("radial"), cost = c(1, 2, 5, 10, 13), 
##         gamma = c(0.01, 0.1, 1, 2, 5, 10)))
## 
## 
## Parameters:
##    SVM-Type:  C-classification 
##  SVM-Kernel:  radial 
##        cost:  10 
## 
## Number of Support Vectors:  190

SVM Model: SFENOM

set.seed(123)
tuningsvm_6 <- tune(svm,promo~.,data= data_training_sfenom,
                  ranges=list(kernel=c("radial"),
                              cost = c(1,2,5,10,13), 
                              gamma = c(0.01,0.1,1,2,5,10)))
trainonly_6 <- tuningsvm_6$best.model
trainonly_6
## 
## Call:
## best.tune(METHOD = svm, train.x = promo ~ ., data = data_training_sfenom, 
##     ranges = list(kernel = c("radial"), cost = c(1, 2, 5, 10, 13), 
##         gamma = c(0.01, 0.1, 1, 2, 5, 10)))
## 
## 
## Parameters:
##    SVM-Type:  C-classification 
##  SVM-Kernel:  radial 
##        cost:  10 
## 
## Number of Support Vectors:  174

SVM Model: FENOM + Regression

set.seed(123)
tuningsvm_7 <- tune(svm,promo~.,data= data_training_fenomregresi,
                  ranges=list(kernel=c( "linear"),
                              cost = c(1,2,5,10,13), 
                              gamma = c(0.01,0.1,1,2,5,10)))
trainonly_7 <- tuningsvm_7$best.model
trainonly_7
## 
## Call:
## best.tune(METHOD = svm, train.x = promo ~ ., data = data_training_fenomregresi, 
##     ranges = list(kernel = c("linear"), cost = c(1, 2, 5, 10, 13), 
##         gamma = c(0.01, 0.1, 1, 2, 5, 10)))
## 
## 
## Parameters:
##    SVM-Type:  C-classification 
##  SVM-Kernel:  linear 
##        cost:  2 
## 
## Number of Support Vectors:  134

Prediction

pred <- predict(trainonly,newdata = data_testing)
predtrain <- predict(trainonly,newdata = data_training)

# Predictions: under-sampled training data + sfenom preprocessing
pred_under <- predict(trainonly_under, newdata = data_testing_sfenom)
pred_undertrain <- predict(trainonly_under, newdata = data_training_sfenom)

# Predictions: over-sampled training data + sfenom preprocessing
pred_ovr <- predict(trainonly_over, newdata = data_testing_sfenom)
pred_ovrtrain <- predict(trainonly_over, newdata = data_training_sfenom)

# Predictions: hybrid-sampled training data + sfenom preprocessing
pred_hyb <- predict(trainonly_1, newdata = data_testing_sfenom)
pred_hybtrain <- predict(trainonly_1, newdata = data_training_sfenom)

# Predictions: discretized data
pred_2 <- predict(trainonly_2, newdata = data_testing_diskret)
pred_2train <- predict(trainonly_2, newdata = data_training_diskret)

# Predictions: FE + anomaly data (FENOM)
pred_3 <- predict(trainonly_3, newdata = data_testing_fenom)
pred_3train <- predict(trainonly_3, newdata = data_training_fenom)

# pred_4 <- predict(trainonly_4, newdata = data_testing_newvar)
# pred_4train <- predict(trainonly_4, newdata = data_training_newvar)

# Standardization of all variables
pred_5 <- predict(trainonly_5, newdata = data_testing_anom)
pred_5train <- predict(trainonly_5, newdata = data_training_anom)

pred_6 <- predict(trainonly_6, newdata = data_testing_sfenom)
pred_6train <- predict(trainonly_6, newdata = data_training_sfenom)

pred_7 <- predict(trainonly_7, newdata = data_testing_fenomregresi)
pred_7train <- predict(trainonly_7, newdata = data_training_fenomregresi)

Prediction Tables

cm <- table(data_testing$promo, pred)
cf <- confusionMatrix(cm)
akurasi <- data.frame("Accuracy" = cf$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf$byClass[["Balanced Accuracy"]], 
              "Sensitivity" = cf$byClass[["Sensitivity"]], 
              "Specificity" = cf$byClass[["Specificity"]])

cmtrain <- table(data_training$promo, predtrain)
cftrain <- confusionMatrix(cmtrain)
akurasitrain <- data.frame("Accuracy" = cftrain$overall[["Accuracy"]],
                       "Balanced Accuracy" = cftrain$byClass[["Balanced Accuracy"]], 
              "Sensitivity" = cftrain$byClass[["Sensitivity"]], 
              "Specificity" = cftrain$byClass[["Specificity"]])

cm_under <- table(data_testing_sfenom$promo, pred_under)
cf_under <- confusionMatrix(cm_under)
akurasi_under <- data.frame("Accuracy" = cf_under$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_under$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_under$byClass[["Sensitivity"]],
              "Specificity" = cf_under$byClass[["Specificity"]])

cm_undertrain <- table(data_training_sfenom$promo, pred_undertrain)
cf_undertrain <- confusionMatrix(cm_undertrain)
akurasi_undertrain <- data.frame("Accuracy" = cf_undertrain$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_undertrain$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_undertrain$byClass[["Sensitivity"]],
              "Specificity" = cf_undertrain$byClass[["Specificity"]])

cm_ovrtest <- table(data_testing_sfenom$promo, pred_ovr)
cf_ovrtest <- confusionMatrix(cm_ovrtest)
akurasi_ovr <- data.frame("Accuracy" = cf_ovrtest$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_ovrtest$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_ovrtest$byClass[["Sensitivity"]],
              "Specificity" = cf_ovrtest$byClass[["Specificity"]])

cm_ovrtrain <- table(data_training_sfenom$promo, pred_ovrtrain)
cf_ovrtrain <- confusionMatrix(cm_ovrtrain)
akurasi_ovrtrain <- data.frame("Accuracy" = cf_ovrtrain$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_ovrtrain$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_ovrtrain$byClass[["Sensitivity"]],
              "Specificity" = cf_ovrtrain$byClass[["Specificity"]])

cm_hyb <- table(data_testing_sfenom$promo, pred_hyb)
cf_hyb <- confusionMatrix(cm_hyb)
akurasi_hyb <- data.frame("Accuracy" = cf_hyb$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_hyb$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_hyb$byClass[["Sensitivity"]],
              "Specificity" = cf_hyb$byClass[["Specificity"]])

cm_hybtrain <- table(data_training_sfenom$promo, pred_hybtrain)
cf_hybtrain <- confusionMatrix(cm_hybtrain)
akurasi_hybtrain <- data.frame("Accuracy" = cf_hybtrain$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_hybtrain$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_hybtrain$byClass[["Sensitivity"]],
              "Specificity" = cf_hybtrain$byClass[["Specificity"]])

cm_2 <- table(data_testing_diskret$promo, pred_2)
cf_2 <- confusionMatrix(cm_2)
akurasi_2 <- data.frame("Accuracy" = cf_2$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_2$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_2$byClass[["Sensitivity"]],
              "Specificity" = cf_2$byClass[["Specificity"]])

cm_2train <- table(data_training_diskret$promo, pred_2train)
cf_2train <- confusionMatrix(cm_2train)
akurasi_2train <- data.frame("Accuracy" = cf_2train$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_2train$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_2train$byClass[["Sensitivity"]],
              "Specificity" = cf_2train$byClass[["Specificity"]])

cm_3 <- table(data_testing_fenom$promo, pred_3)
cf_3 <- confusionMatrix(cm_3)
akurasi_3 <- data.frame("Accuracy" = cf_3$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_3$byClass[["Balanced Accuracy"]], 
              "Sensitivity" = cf_3$byClass[["Sensitivity"]], 
              "Specificity" = cf_3$byClass[["Specificity"]])

cm_3train <- table(data_training_fenom$promo, pred_3train)
cf_3train <- confusionMatrix(cm_3train)
akurasi_3train <- data.frame("Accuracy" = cf_3train$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_3train$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_3train$byClass[["Sensitivity"]],
              "Specificity" = cf_3train$byClass[["Specificity"]])

# cm_4 <- table(data_testing_newvar$promo, pred_4)
# cf_4 <- confusionMatrix(cm_4)
# akurasi_4 <- data.frame("Accuracy" = cf_4$overall[["Accuracy"]],
#                        "Balanced Accuracy" = cf_4$byClass[["Balanced Accuracy"]], 
#               "Sensitivity" = cf_4$byClass[["Sensitivity"]], 
#               "Specificity" = cf_4$byClass[["Specificity"]])

# cm_4train <- table(data_training_newvar$promo, pred_4train)
# cf_4train <- confusionMatrix(cm_4train)
# akurasi_4train <- data.frame("Accuracy" = cf_4train$overall[["Accuracy"]],
#                        "Balanced Accuracy" = cf_4train$byClass[["Balanced Accuracy"]], 
#               "Sensitivity" = cf_4train$byClass[["Sensitivity"]], 
#               "Specificity" = cf_4train$byClass[["Specificity"]])

cm_5 <- table(data_testing_anom$promo, pred_5)
cf_5 <- confusionMatrix(cm_5)
akurasi_5 <- data.frame("Accuracy" = cf_5$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_5$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_5$byClass[["Sensitivity"]],
              "Specificity" = cf_5$byClass[["Specificity"]])

cm_5train <- table(data_training_anom$promo, pred_5train)
cf_5train <- confusionMatrix(cm_5train)
akurasi_5train <- data.frame("Accuracy" = cf_5train$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_5train$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_5train$byClass[["Sensitivity"]],
              "Specificity" = cf_5train$byClass[["Specificity"]])

# Model SFENOM

cm_6 <- table(data_testing_sfenom$promo, pred_6)
cf_6 <- confusionMatrix(cm_6)
akurasi_6 <- data.frame("Accuracy" = cf_6$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_6$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_6$byClass[["Sensitivity"]],
              "Specificity" = cf_6$byClass[["Specificity"]])

cm_6train <- table(data_training_sfenom$promo, pred_6train)
cf_6train <- confusionMatrix(cm_6train)
akurasi_6train <- data.frame("Accuracy" = cf_6train$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_6train$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_6train$byClass[["Sensitivity"]],
              "Specificity" = cf_6train$byClass[["Specificity"]])

# Model FENOM Regresi

cm_7 <- table(data_testing_fenomregresi$promo, pred_7)
cf_7 <- confusionMatrix(cm_7)
akurasi_7 <- data.frame("Accuracy" = cf_7$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_7$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_7$byClass[["Sensitivity"]],
              "Specificity" = cf_7$byClass[["Specificity"]])

cm_7train <- table(data_training_fenomregresi$promo, pred_7train)
cf_7train <- confusionMatrix(cm_7train)
akurasi_7train <- data.frame("Accuracy" = cf_7train$overall[["Accuracy"]],
                       "Balanced Accuracy" = cf_7train$byClass[["Balanced Accuracy"]],
              "Sensitivity" = cf_7train$byClass[["Sensitivity"]],
              "Specificity" = cf_7train$byClass[["Specificity"]])

Accuracy

Testing Accuracy

akurasitest <- rbind("Under Balancing" = akurasi_under, "Hybrid Balancing" = akurasi_hyb, "Over Balancing" = akurasi_ovr,"Train" = akurasi, "Diskretasi" = akurasi_2,  "FENOM" = akurasi_3, "Anom" = akurasi_5)
akurasitest
##                   Accuracy Balanced.Accuracy Sensitivity Specificity
## Under Balancing  0.7424242         0.7255359   0.8648649   0.5862069
## Hybrid Balancing 0.7575758         0.7417582   0.7692308   0.7142857
## Over Balancing   0.7121212         0.6716687   0.7551020   0.5882353
## Train            0.7916667         0.7823529   0.8000000   0.7647059
## Diskretasi       0.7727273         0.7444444   0.8222222   0.6666667
## FENOM            0.8030303         0.8193033   0.7924528   0.8461538
## Anom             0.7916667         0.7823529   0.8000000   0.7647059
(akurasitest[6,3] + akurasitest[6,4])/2
## [1] 0.8193033

Training Accuracy

akurasitraining <- rbind("Under Balancing" = akurasi_undertrain, "Hybrid Balancing" = akurasi_hybtrain, "Over Balancing" = akurasi_ovrtrain, "Train" = akurasitrain, "Diskretasi" = akurasi_2train,  "FENOM" = akurasi_3train, "Anom" = akurasi_5train)
akurasitraining
##                   Accuracy Balanced.Accuracy Sensitivity Specificity
## Under Balancing  0.7424242         0.7255359   0.8648649   0.5862069
## Hybrid Balancing 0.8880309         0.8861447   0.8907104   0.8815789
## Over Balancing   0.9575290         0.9623603   0.9497207   0.9750000
## Train            0.8251748         0.8705882   0.8000000   0.9411765
## Diskretasi       0.9150579         0.9242156   0.9032258   0.9452055
## FENOM            0.7992278         0.8157994   0.7884615   0.8431373
## Anom             0.8251748         0.8705882   0.8000000   0.9411765

All Accuracies

data.frame("Keterangan" = rownames(akurasitest), 
           "Test_Accuracy" = akurasitest$Accuracy,  
           "Train_Accuracy" = akurasitraining$Accuracy,
           "Test_B.Accuracy" = akurasitest$Balanced.Accuracy,  
           "Train_B.Accuracy" = akurasitraining$Balanced.Accuracy)
##         Keterangan Test_Accuracy Train_Accuracy Test_B.Accuracy
## 1  Under Balancing     0.7424242      0.7424242       0.7255359
## 2 Hybrid Balancing     0.7575758      0.8880309       0.7417582
## 3   Over Balancing     0.7121212      0.9575290       0.6716687
## 4            Train     0.7916667      0.8251748       0.7823529
## 5       Diskretasi     0.7727273      0.9150579       0.7444444
## 6            FENOM     0.8030303      0.7992278       0.8193033
## 7             Anom     0.7916667      0.8251748       0.7823529
##   Train_B.Accuracy
## 1        0.7255359
## 2        0.8861447
## 3        0.9623603
## 4        0.8705882
## 5        0.9242156
## 6        0.8157994
## 7        0.8705882
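As a closing sketch (not part of the original output), the test and train accuracies above can be plotted side by side, which makes the large train-test gaps of the over-balanced and discretized models easier to see; acc_compare is a new helper object:

acc_compare <- data.frame(Keterangan = rownames(akurasitest),
                          Test  = akurasitest$Accuracy,
                          Train = akurasitraining$Accuracy)
acc_compare %>%
  pivot_longer(c(Test, Train), names_to = "Set", values_to = "Accuracy") %>%
  ggplot(aes(x = reorder(Keterangan, Accuracy), y = Accuracy, fill = Set)) +
  geom_col(position = "dodge") +
  coord_flip() +
  labs(x = NULL, title = "Train vs. test accuracy per preprocessing scenario")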