library(dplyr)
## 
## Attaching package: 'dplyr'
## The following objects are masked from 'package:stats':
## 
##     filter, lag
## The following objects are masked from 'package:base':
## 
##     intersect, setdiff, setequal, union
library(tidyverse)
## ── Attaching core tidyverse packages ──────────────────────── tidyverse 2.0.0 ──
## ✔ forcats   1.0.0     ✔ readr     2.1.4
## ✔ ggplot2   3.4.3     ✔ stringr   1.5.0
## ✔ lubridate 1.9.2     ✔ tibble    3.2.1
## ✔ purrr     1.0.1     ✔ tidyr     1.3.0
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ dplyr::filter() masks stats::filter()
## ✖ dplyr::lag()    masks stats::lag()
## ℹ Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors
library(tidymodels)
## ── Attaching packages ────────────────────────────────────── tidymodels 1.1.1 ──
## ✔ broom        1.0.5     ✔ rsample      1.2.0
## ✔ dials        1.2.0     ✔ tune         1.1.2
## ✔ infer        1.0.5     ✔ workflows    1.1.3
## ✔ modeldata    1.2.0     ✔ workflowsets 1.0.1
## ✔ parsnip      1.1.1     ✔ yardstick    1.2.0
## ✔ recipes      1.0.8     
## ── Conflicts ───────────────────────────────────────── tidymodels_conflicts() ──
## ✖ scales::discard() masks purrr::discard()
## ✖ dplyr::filter()   masks stats::filter()
## ✖ recipes::fixed()  masks stringr::fixed()
## ✖ dplyr::lag()      masks stats::lag()
## ✖ yardstick::spec() masks readr::spec()
## ✖ recipes::step()   masks stats::step()
## • Dig deeper into tidy modeling with R at https://www.tmwr.org
library(MASS)
## 
## Attaching package: 'MASS'
## 
## The following object is masked from 'package:dplyr':
## 
##     select
library(DMwR)
## Loading required package: lattice
## Loading required package: grid
## Registered S3 method overwritten by 'quantmod':
##   method            from
##   as.zoo.data.frame zoo 
## 
## Attaching package: 'DMwR'
## 
## The following object is masked from 'package:broom':
## 
##     bootstrap
library(caret)
## 
## Attaching package: 'caret'
## 
## The following objects are masked from 'package:yardstick':
## 
##     precision, recall, sensitivity, specificity
## 
## The following object is masked from 'package:purrr':
## 
##     lift
library(ROCR)
# Contains the data
library(ISLR)
## Warning: package 'ISLR' was built under R version 4.3.2
#library(rJava)
#library(extraTrees)
library("writexl")
## Warning: package 'writexl' was built under R version 4.3.2
library(openxlsx)
## Warning: package 'openxlsx' was built under R version 4.3.2
library(themis)
library(embed)
## Warning: package 'embed' was built under R version 4.3.2
library(DataExplorer)
library(ggpubr)
library(DALEXtra)
## Warning: package 'DALEXtra' was built under R version 4.3.2
## Loading required package: DALEX
## Warning: package 'DALEX' was built under R version 4.3.2
## Welcome to DALEX (version: 2.4.3).
## Find examples and detailed introduction at: http://ema.drwhy.ai/
## 
## 
## Attaching package: 'DALEX'
## 
## The following object is masked from 'package:dplyr':
## 
##     explain
library(rcompanion)
## Warning: package 'rcompanion' was built under R version 4.3.2
## 
## Attaching package: 'rcompanion'
## 
## The following object is masked from 'package:yardstick':
## 
##     accuracy
library(randomForest)
## randomForest 4.7-1.1
## Type rfNews() to see new features/changes/bug fixes.
## 
## Attaching package: 'randomForest'
## 
## The following object is masked from 'package:ggplot2':
## 
##     margin
## 
## The following object is masked from 'package:dplyr':
## 
##     combine
library(xgboost)
## Warning: package 'xgboost' was built under R version 4.3.2
## 
## Attaching package: 'xgboost'
## 
## The following object is masked from 'package:dplyr':
## 
##     slice
setwd("C:/DATA D/S2 Statistika dan Sains Data/Semester 3/Pemodelan Klasifikasi")

ORIGINAL DATA

moklas<-read.csv("data moklas.csv",header = T,sep = ';')
moklas
moklas_numeric <- moklas[sapply(moklas, is.numeric)]
moklas_numeric
glimpse(moklas)
## Rows: 359
## Columns: 13
## $ cabang             <int> 9, 14, 8, 11, 9, 1, 9, 10, 14, 5, 5, 11, 1, 10, 7, …
## $ jenis.kelamin      <int> 1, 1, 1, 2, 1, 1, 2, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, …
## $ usia               <int> 37, 44, 33, 40, 49, 41, 47, 47, 42, 31, 42, 41, 44,…
## $ pendidikan         <int> 3, 3, 4, 3, 2, 2, 3, 4, 4, 2, 3, 2, 4, 3, 4, 4, 3, …
## $ frekuensi.fashion  <int> 3, 3, 4, 4, 2, 2, 6, 4, 3, 3, 5, 0, 4, 3, 3, 1, 5, …
## $ nilai.fashion      <dbl> 0.3568, 0.5388, 0.3012, 0.9808, 0.1730, 0.6352, 2.4…
## $ frekuensi.footwear <int> 2, 1, 4, 3, 2, 0, 2, 1, 3, 1, 2, 5, 3, 2, 2, 1, 3, …
## $ nilai.footwear     <dbl> 3.0260, 1.5922, 0.4540, 0.2978, 0.1484, 0.6734, 0.5…
## $ frekuensi.lainnya  <int> 3, 2, 4, 4, 1, 2, 5, 4, 3, 3, 5, 0, 1, 2, 3, 0, 5, …
## $ nilai.lainnya      <dbl> 2.2050, 0.4408, 0.9494, 0.9728, 0.4654, 0.9780, 1.8…
## $ total.nilai.tunai  <dbl> 8.43, 1.79, 0.00, 7.55, 1.05, 2.68, 21.26, 4.89, 0.…
## $ lama.member        <int> 13, 39, 18, 46, 1, 7, 16, 24, 5, 49, 45, 1, 24, 32,…
## $ promo              <int> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, …
skimr::skim(moklas)
Data summary
Name moklas
Number of rows 359
Number of columns 13
_______________________
Column type frequency:
numeric 13
________________________
Group variables None

Variable type: numeric

skim_variable n_missing complete_rate mean sd p0 p25 p50 p75 p100 hist
cabang 0 1 7.62 3.96 1.00 4.00 8.00 11.00 14.00 ▇▆▅▇▇
jenis.kelamin 0 1 1.57 0.50 1.00 1.00 2.00 2.00 2.00 ▆▁▁▁▇
usia 0 1 40.14 5.07 26.00 37.00 40.00 44.00 56.00 ▂▆▇▃▁
pendidikan 0 1 3.06 0.81 1.00 3.00 3.00 4.00 4.00 ▁▃▁▇▆
frekuensi.fashion 0 1 3.06 1.52 0.00 2.00 3.00 4.00 8.00 ▃▇▃▃▁
nilai.fashion 0 1 0.78 0.52 0.03 0.39 0.67 1.05 2.63 ▇▇▃▁▁
frekuensi.footwear 0 1 3.07 1.63 0.00 2.00 3.00 4.00 8.00 ▃▇▃▂▁
nilai.footwear 0 1 0.85 0.62 0.03 0.41 0.69 1.11 3.55 ▇▅▂▁▁
frekuensi.lainnya 0 1 2.78 1.49 0.00 2.00 3.00 4.00 7.00 ▃▃▇▂▁
nilai.lainnya 0 1 0.85 0.59 0.02 0.43 0.74 1.11 3.03 ▇▇▂▁▁
total.nilai.tunai 0 1 2.23 3.43 0.00 0.00 0.67 3.04 23.02 ▇▂▁▁▁
lama.member 0 1 25.80 14.42 1.00 14.00 25.00 38.00 51.00 ▇▇▆▆▇
promo 0 1 0.33 0.47 0.00 0.00 0.00 1.00 1.00 ▇▁▁▁▅
moklas$promo<-as.factor(moklas$promo)
moklas$jenis.kelamin<-as.factor(moklas$jenis.kelamin)
moklas$pendidikan<-as.factor(moklas$pendidikan)
moklas$cabang<-as.factor(moklas$cabang)
moklas
summary(moklas)
##      cabang    jenis.kelamin      usia       pendidikan frekuensi.fashion
##  9      : 36   1:153         Min.   :26.00   1: 12      Min.   :0.000    
##  3      : 32   2:206         1st Qu.:37.00   2: 72      1st Qu.:2.000    
##  10     : 32                 Median :40.00   3:156      Median :3.000    
##  7      : 31                 Mean   :40.14   4:119      Mean   :3.061    
##  12     : 29                 3rd Qu.:44.00              3rd Qu.:4.000    
##  14     : 27                 Max.   :56.00              Max.   :8.000    
##  (Other):172                                                             
##  nilai.fashion    frekuensi.footwear nilai.footwear   frekuensi.lainnya
##  Min.   :0.0296   Min.   :0.000      Min.   :0.0250   Min.   :0.000    
##  1st Qu.:0.3944   1st Qu.:2.000      1st Qu.:0.4122   1st Qu.:2.000    
##  Median :0.6694   Median :3.000      Median :0.6946   Median :3.000    
##  Mean   :0.7840   Mean   :3.072      Mean   :0.8474   Mean   :2.783    
##  3rd Qu.:1.0457   3rd Qu.:4.000      3rd Qu.:1.1068   3rd Qu.:4.000    
##  Max.   :2.6276   Max.   :8.000      Max.   :3.5494   Max.   :7.000    
##                                                                        
##  nilai.lainnya    total.nilai.tunai  lama.member   promo  
##  Min.   :0.0214   Min.   : 0.000    Min.   : 1.0   0:239  
##  1st Qu.:0.4332   1st Qu.: 0.000    1st Qu.:14.0   1:120  
##  Median :0.7360   Median : 0.670    Median :25.0          
##  Mean   :0.8527   Mean   : 2.234    Mean   :25.8          
##  3rd Qu.:1.1142   3rd Qu.: 3.040    3rd Qu.:38.0          
##  Max.   :3.0334   Max.   :23.020    Max.   :51.0          
## 
DataExplorer::plot_intro(moklas,theme_config = theme_classic())

cbg<-read.csv("cabang.csv",header=T,sep=';')
cbg

Data Exploration

ggplot(cbg, 
       aes(x = Kode.Cabang, 
           y = Banyaknya.Pegawai)) + 
  geom_col(fill = rainbow(14), 
           color = "black") +
  labs(title = "Banyaknya Pegawai Tiap Cabang", 
       x = "Kode Cabang", 
       y = "Banyaknya Pegawai") +
  coord_flip() +
  geom_text(aes(label = Banyaknya.Pegawai),
            hjust = 2,
            color = "white",
            fontface = "bold")

# Bar chart of the number of members per branch, with a label on each bar
ggplot(cbg, 
       aes(x = Kode.Cabang,
           y = Banyaknya.Member)) + 
  geom_bar(stat = "identity", 
           fill = rainbow(14), 
           color = "black") +
  geom_text(aes(label = Banyaknya.Member), 
            hjust = -0.10) +
  theme_minimal() +                               
  labs(x = "Kode Cabang", 
       y = "Banyaknya Member", 
       title  = "Banyaknya Member Tiap Cabang")+
  coord_flip()

library(RColorBrewer)
fitur = c("Nilai \n Fashion", "Nilai \n Footwear", "Nilai \n Lainnya")
moklas.nilai<-data.frame(moklas$nilai.fashion,moklas$nilai.footwear,moklas$nilai.lainnya)
par(mar = c(9, 5, 4, 2))
boxplot(moklas.nilai,
        las = 1,
        cex = 0.4,
        cex.axis = 0.9,
        main = "Variabel Penentuan Respon Pelanggan Terhadap Promo",
        ylab = "Nilai",
        xlab = "",
        horizontal = FALSE,
        col = brewer.pal(8, "Set2"),
        xaxt = "n")
axis(1, at = 1:3, labels = fitur, col.axis = "black", cex.axis = 1)

library(RColorBrewer)
fitur = c("Usia","Lama \n Member")
moklas.angka<-data.frame(moklas$usia,moklas$lama.member)
par(mar = c(9, 5, 4, 2))
boxplot(moklas.angka,
        las = 1,
        cex = 0.4,
        cex.axis = 0.9,
        main = "Variabel Penentuan Respon Pelanggan Terhadap Promo",
        ylab = "Nilai",
        xlab = "",
        horizontal = FALSE,
        col = brewer.pal(8, "Set2"),
        xaxt = "n")
axis(1, at = 1:2, labels = fitur, col.axis = "black", cex.axis = 1)

library(RColorBrewer)
fitur = c("Frekuensi \n Fashion", "Frekuensi \n Footwear", "Frekuensi \n Lainnya")
moklas.frekuensi<-data.frame(moklas$frekuensi.fashion,moklas$frekuensi.footwear,moklas$frekuensi.lainnya)
par(mar = c(9, 5, 4, 2))
boxplot(moklas.frekuensi,
        las = 1,
        cex = 0.4,
        cex.axis = 0.9,
        main = "Variabel Penentuan Respon Pelanggan Terhadap Promo",
        ylab = "Nilai",
        xlab = "",
        horizontal = FALSE,
        col = brewer.pal(8, "Set2"),
        xaxt = "n")
axis(1, at = 1:3, labels = fitur, col.axis = "black", cex.axis = 1)

DataExplorer::plot_bar(data = moklas$promo,ggtheme = theme_bw())

prop.table(table(moklas$promo))
## 
##         0         1 
## 0.6657382 0.3342618
barplot(table(moklas$promo), col = "lightblue", ylim = c(0, 300))
text(0.8, y = 270, "66.57%")
text(2, y = 140, "33.43%")

moklas
library(ggplot2)
p1<- ggplot(data=moklas, aes(y= frekuensi.fashion, group=promo, fill=promo)) +geom_boxplot()
p2<- ggplot(data=moklas, aes(y= frekuensi.footwear, group=promo, fill=promo)) +geom_boxplot()
p3<- ggplot(data=moklas, aes(y= frekuensi.lainnya, group=promo, fill=promo)) +geom_boxplot()
gridExtra::grid.arrange(p1, p2,p3, nrow = 1)

library(ggplot2)
p1<- ggplot(data=moklas, aes(y= nilai.fashion, group=promo, fill=promo)) +geom_boxplot()
p2<- ggplot(data=moklas, aes(y= nilai.footwear, group=promo, fill=promo)) +geom_boxplot()
p3<- ggplot(data=moklas, aes(y= nilai.lainnya, group=promo, fill=promo)) +geom_boxplot()
p4<- ggplot(data=moklas, aes(y= total.nilai.tunai, group=promo, fill=promo)) +geom_boxplot()
gridExtra::grid.arrange(p1, p2,p3,p4, nrow = 1)

library(ggplot2)
p1<- ggplot(data=moklas, aes(y= usia, group=promo, fill=promo)) +geom_boxplot()
p2<- ggplot(data=moklas, aes(y= lama.member, group=promo, fill=promo)) +geom_boxplot()
gridExtra::grid.arrange(p1, p2, nrow = 1)

library(ggplot2)
p1<- ggplot(data=moklas, aes(x= pendidikan, group=promo, fill=promo)) +geom_bar()
p2<- ggplot(data=moklas, aes(x= cabang, group=promo, fill=promo)) +geom_bar()
p3<- ggplot(data=moklas, aes(x= jenis.kelamin, group=promo, fill=promo)) +geom_bar()
gridExtra::grid.arrange(p1, p2,p3, nrow = 1)

library(viridis)
## Warning: package 'viridis' was built under R version 4.3.2
## Loading required package: viridisLite
## 
## Attaching package: 'viridis'
## The following object is masked from 'package:scales':
## 
##     viridis_pal
b1<- ggplot(data=moklas, aes(x= cabang, group=promo, fill=promo)) +
  geom_bar(alpha=.7) +
  scale_fill_viridis(discrete = T)+
  theme_classic()+
  xlab("cabang")
b2<- ggplot(data=moklas, aes(x= jenis.kelamin, group=promo, fill=promo)) +
  geom_bar(alpha=.7) +
  scale_fill_viridis(discrete = T)+
  theme_classic()+
  xlab("jenis kelamin")
b3<- ggplot(data=moklas, aes(x= pendidikan, group=promo, fill=promo)) +
  geom_bar(alpha=.7) +
  scale_fill_viridis(discrete = T)+
  theme_classic()+
  xlab("pendidikan")
gridExtra::grid.arrange(b1,b2,b3, nrow = 1)

# Correlation among the numeric variables
moklas_numeric <- moklas[sapply(moklas, is.numeric)]
moklas_numeric
corrplot::corrplot(cor(moklas_numeric),
                   method= "number", type = "lower")

plot(moklas$frekuensi.fashion, moklas$frekuensi.lainnya, main = "Scatter Plot Frekuensi Fashion dan Frekuensi Lainnya")

Splitting Data

set.seed(16)
train <- createDataPartition(as.factor(moklas$promo), p=0.8, list=FALSE)
moklas.train=moklas[train,]
moklas.test=moklas[-train,]
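
createDataPartition samples within each level of the outcome, so both subsets should keep the original 66.6/33.4 promo split. A quick check (a minimal sketch; proportions may differ slightly from the full data because rows are whole):

# Stratified split sanity check: class proportions in train and test
prop.table(table(moklas.train$promo))
prop.table(table(moklas.test$promo))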

DATA AFTER FEATURE ENGINEERING

setwd("C:/DATA D/S2 Statistika dan Sains Data/Semester 3/Pemodelan Klasifikasi")
moklas3.jk<-read.csv("moklas feature engineering.csv",header = T,sep = ';')
moklas3.jk
moklas3.jk<-moklas3.jk[,-2]
moklas3.jk$promo<-as.factor(moklas3.jk$promo)
moklas3.jk$pendidikan<-as.factor(moklas3.jk$pendidikan)
moklas3.jk$cabang<-as.factor(moklas3.jk$cabang)
moklas3.jk
library(classInt)
## Warning: package 'classInt' was built under R version 4.3.2
# equal frequency discretization
eqfreq<-classIntervals(moklas3.jk$usia, 4, style = 'quantile')
moklas3.jk$usia<-cut(moklas3.jk$usia, breaks=eqfreq$brks, labels=1:4, include.lowest=TRUE)
eqfreq5<-classIntervals(moklas3.jk$lama.member, 4, style = 'quantile')
eqfreq5$brks
## [1]  1 14 25 38 51
moklas3.jk$lama.member<-cut(moklas3.jk$lama.member, breaks=eqfreq5$brks, labels=1:4, include.lowest=TRUE)
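
Equal-frequency discretization should put roughly a quarter of the 359 observations in each bin; a quick check (sketch; counts can be uneven when quantile breaks fall on tied values):

# Bin counts after quantile discretization; each bin should hold ~90 rows
table(moklas3.jk$usia)
table(moklas3.jk$lama.member)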

DATA EXPLORATION

library(RColorBrewer)
fitur = c("Jumlah \n Fashion", "Jumlah \n Footwear", "Jumlah \n Lainnya")
moklas3.jk.nilai<-data.frame(moklas3.jk$jumlah.fashion,moklas3.jk$jumlah.footwear,moklas3.jk$jumlah.lainnya)
par(mar = c(9, 5, 4, 2))
boxplot(moklas3.jk.nilai,
        las = 1,
        cex = 0.4,
        cex.axis = 0.9,
        main = "Variabel Penentuan Respon Pelanggan Terhadap Promo",
        ylab = "Nilai",
        xlab = "",
        horizontal = FALSE,
        col = brewer.pal(8, "Set2"),
        xaxt = "n")
axis(1, at = 1:3, labels = fitur, col.axis = "black", cex.axis = 1)

library(RColorBrewer)
fitur = c("Usia","Lama \n Member")
# usia and lama.member are now ordinal factors; convert the level codes back
# to numeric so boxplot() can draw them
moklas3.jk.angka<-data.frame(as.numeric(moklas3.jk$usia),as.numeric(moklas3.jk$lama.member))
par(mar = c(9, 5, 4, 2))
boxplot(moklas3.jk.angka,
        las = 1,
        cex = 0.4,
        cex.axis = 0.9,
        main = "Variabel Penentuan Respon Pelanggan Terhadap Promo",
        ylab = "Nilai",
        xlab = "",
        horizontal = FALSE,
        col = brewer.pal(8, "Set2"),
        xaxt = "n")
axis(1, at = 1:2, labels = fitur, col.axis = "black", cex.axis = 1)

DataExplorer::plot_bar(data = moklas3.jk$promo,ggtheme = theme_bw())

prop.table(table(moklas3.jk$promo))
## 
##         0         1 
## 0.6657382 0.3342618
barplot(table(moklas3.jk$promo), col = c("lightcoral","lightblue"),ylim=c(0,300))
text(0.8, y=270,"66.57%")
text(2, y = 140,"33.43%")

moklas3.jk
library(ggplot2)
p1<- ggplot(data=moklas3.jk, aes(y= jumlah.fashion, group=promo, fill=promo)) +geom_boxplot()
p2<- ggplot(data=moklas3.jk, aes(y= jumlah.footwear, group=promo, fill=promo)) +geom_boxplot()
p3<- ggplot(data=moklas3.jk, aes(y= jumlah.lainnya, group=promo, fill=promo)) +geom_boxplot()
gridExtra::grid.arrange(p1, p2,p3, nrow = 1)

library(ggplot2)
p1<- ggplot(data=moklas3.jk, aes(x= pendidikan, group=promo, fill=promo)) +geom_bar()
p2<- ggplot(data=moklas3.jk, aes(x= cabang, group=promo, fill=promo)) +geom_bar()
gridExtra::grid.arrange(p1, p2, nrow = 1)

library(ggplot2)
p3<- ggplot(data=moklas3.jk, aes(x= usia, group=promo, fill=promo)) +geom_bar()
p4<- ggplot(data=moklas3.jk, aes(x= lama.member, group=promo, fill=promo)) +geom_bar()
gridExtra::grid.arrange(p3, p4, nrow = 1)

library(viridis)
b1<- ggplot(data=moklas3.jk, aes(x= cabang, group=promo, fill=promo)) +
  geom_bar(alpha=.7) +
  scale_fill_viridis(discrete = T)+
  theme_classic()+
  xlab("cabang")
b3<- ggplot(data=moklas3.jk, aes(x= pendidikan, group=promo, fill=promo)) +
  geom_bar(alpha=.7) +
  scale_fill_viridis(discrete = T)+
  theme_classic()+
  xlab("pendidikan")
gridExtra::grid.arrange(b1,b3, nrow = 1)

# Correlation among the numeric variables
moklas_numeric <- moklas3.jk[sapply(moklas3.jk, is.numeric)]
moklas_numeric
corrplot::corrplot(cor(moklas_numeric),
                   method= "number", type = "lower")

ggplot(data = moklas3.jk, aes(fill = promo))+
  geom_bar(aes(x = promo))+
  xlab("")+
  ylab("pendidikan")+
  scale_y_continuous(expand = c(0,0))+
  scale_x_discrete(expand = c(0,0))+
  theme(legend.position = "none", 
        legend.title = element_blank(),
        panel.grid.major = element_blank(),
        panel.grid.minor = element_blank(),
        panel.background = element_blank())

ggplot(moklas3.jk,aes(x=pendidikan,fill=promo))+
    geom_bar(position="fill")+
    geom_text(aes(label=scales::percent(after_stat(count)/sum(after_stat(count)))),
              stat='count',position=position_fill(vjust=0.5))

ggplot(moklas3.jk,aes(x=cabang,fill=promo))+
    geom_bar(position="fill")+
    geom_text(aes(label=scales::percent(after_stat(count)/sum(after_stat(count)))),
              stat='count',position=position_fill(vjust=0.5),cex=2)

ggplot(moklas3.jk,aes(x=usia,fill=promo))+
    geom_bar(position="fill")+
    geom_text(aes(label=scales::percent(after_stat(count)/sum(after_stat(count)))),
              stat='count',position=position_fill(vjust=0.5),cex=2)

ggplot(moklas3.jk,aes(x=lama.member,fill=promo))+
    geom_bar(position="fill")+
    geom_text(aes(label=scales::percent(after_stat(count)/sum(after_stat(count)))),
              stat='count',position=position_fill(vjust=0.5),cex=2)

SPLITTING DATA

set.seed(16)
train3 <- createDataPartition(as.factor(moklas3.jk$promo), p=0.8, list=FALSE)
moklas3.jk.train=moklas3.jk[train3,]
moklas3.jk.test=moklas3.jk[-train3,]

CORRELATION TESTS

Cramer’s V (Nominal)

# Cramer's V between promo (column 9) and the nominal predictor cabang (column 1)
thislist <- list()
kolom <- 1
k = 1

for(i in kolom){
  thislist <- append(thislist,cramerV(moklas3.jk.train[,9], moklas3.jk.train[,i], bias.correct = FALSE)[[1]])
  names(thislist)[[k]] <- names(moklas3.jk)[i]
  
  k = k+1
}

thislist
## $cabang
## [1] 0.5056
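
Cramér's V is a rescaled chi-square statistic, V = sqrt(chi2 / (n * (min(r, c) - 1))). A minimal sketch computing it by hand; it should reproduce the cramerV() value above (~0.5056) up to rounding:

# Cramer's V from first principles
tab  <- table(moklas3.jk.train$promo, moklas3.jk.train$cabang)
chi2 <- suppressWarnings(chisq.test(tab, correct = FALSE))$statistic
n    <- sum(tab)
sqrt(chi2 / (n * (min(dim(tab)) - 1)))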

Kendall (Ordinal)

# Kendall's tau between promo (column 9) and the ordinal predictors
# usia, pendidikan, and lama.member (columns 2, 3, 8)
thislist_2 <- list()
kolom_2 <- c(2,3,8)
k = 1

for(i in kolom_2){
  thislist_2 <- append(thislist_2,cor.test(as.numeric(moklas3.jk.train[,9]), as.numeric(moklas3.jk.train[,i]),method="kendall")$estimate[[1]])
  names(thislist_2)[[k]] <- names(moklas3.jk)[i]
  
  k = k+1
}

thislist_2
## $usia
## [1] 0.1044235
## 
## $pendidikan
## [1] -0.1015961
## 
## $lama.member
## [1] -0.1039916
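
The tau values above are weak; whether they are distinguishable from zero can be checked by keeping the p-values as well (a sketch of the same loop; warnings about exact p-values under ties are suppressed):

for (i in kolom_2) {
  kt <- suppressWarnings(cor.test(as.numeric(moklas3.jk.train[, 9]),
                                  as.numeric(moklas3.jk.train[, i]),
                                  method = "kendall"))
  cat(names(moklas3.jk)[i], ": tau =", round(kt$estimate, 4),
      ", p =", round(kt$p.value, 4), "\n")
}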

CLASS IMBALANCE

RANDOM UNDERSAMPLING

set.seed(16)
# x should contain only the predictors; keeping promo in x would duplicate it
down_train <- downSample(x = moklas3.jk.train[, -9],
                         y = moklas3.jk.train$promo)

colnames(down_train)[colnames(down_train)=="Class"] = "promo"

glimpse(down_train) 
## Rows: 192
## Columns: 9
## $ cabang            <fct> 9, 13, 4, 1, 12, 1, 3, 6, 2, 12, 9, 6, 3, 3, 14, 4, …
## $ usia              <fct> 2, 3, 1, 3, 4, 3, 1, 1, 2, 2, 2, 3, 2, 3, 1, 2, 4, 4…
## $ pendidikan        <fct> 4, 4, 4, 4, 4, 4, 4, 3, 2, 3, 3, 2, 2, 3, 4, 3, 4, 4…
## $ jumlah.fashion    <dbl> 1.8576, 1.5404, 1.0060, 0.6080, 0.5368, 7.3240, 4.19…
## $ jumlah.footwear   <dbl> 0.8792, 4.8520, 4.2826, 12.7584, 3.3792, 1.6140, 2.3…
## $ jumlah.lainnya    <dbl> 0.8394, 1.2664, 6.6024, 0.5546, 0.0896, 0.2534, 7.70…
## $ total.nilai.tunai <dbl> 0.00, 7.00, 8.44, 1.96, 0.00, 3.01, 4.65, 3.12, 0.00…
## $ lama.member       <fct> 2, 4, 1, 2, 4, 2, 3, 4, 4, 1, 4, 2, 1, 4, 3, 3, 2, 3…
## $ promo             <fct> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
table(moklas3.jk.train$promo)
## 
##   0   1 
## 192  96
table(down_train$promo)
## 
##  0  1 
## 96 96
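
Under the hood, downSample keeps every minority-class row and a random subset of the majority class of the same size. A base-R sketch of the idea (the specific rows drawn will differ from caret's internal sampling even with the same seed):

set.seed(16)
idx0 <- which(moklas3.jk.train$promo == "0")   # majority class
idx1 <- which(moklas3.jk.train$promo == "1")   # minority class
down_manual <- moklas3.jk.train[c(sample(idx0, length(idx1)), idx1), ]
table(down_manual$promo)   # 96 / 96, same balance as down_train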

RANDOM OVERSAMPLING

set.seed(16)
up_train <- upSample(x = moklas3.jk.train[, -9],
                     y = moklas3.jk.train$promo)

colnames(up_train)[colnames(up_train)=="Class"] = "promo"

glimpse(up_train) 
## Rows: 384
## Columns: 9
## $ cabang            <fct> 9, 14, 11, 9, 1, 10, 5, 1, 10, 13, 8, 10, 14, 11, 1,…
## $ usia              <fct> 1, 3, 2, 4, 3, 4, 3, 3, 4, 3, 2, 2, 1, 3, 3, 3, 2, 3…
## $ pendidikan        <fct> 3, 3, 3, 2, 2, 4, 3, 4, 3, 4, 4, 3, 3, 3, 4, 4, 3, 3…
## $ jumlah.fashion    <dbl> 1.0704, 1.6164, 3.9232, 0.3460, 1.2704, 4.8048, 2.91…
## $ jumlah.footwear   <dbl> 6.0520, 1.5922, 0.8934, 0.2968, 0.0000, 0.7414, 0.91…
## $ jumlah.lainnya    <dbl> 6.6150, 0.8816, 3.8912, 0.4654, 1.9560, 0.9392, 4.30…
## $ total.nilai.tunai <dbl> 8.43, 1.79, 7.55, 1.05, 2.68, 4.89, 0.00, 3.01, 1.01…
## $ lama.member       <fct> 1, 4, 4, 1, 1, 2, 4, 2, 3, 3, 3, 3, 3, 2, 2, 2, 4, 2…
## $ promo             <fct> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
table(moklas3.jk.train$promo)
## 
##   0   1 
## 192  96
table(up_train$promo)
## 
##   0   1 
## 192 192

SMOTE

set.seed(16)
smote_train <- SMOTE(promo ~ ., data = moklas3.jk.train,perc.over=200, perc.under=100)

glimpse(smote_train)
## Rows: 480
## Columns: 9
## $ cabang            <fct> 9, 6, 4, 7, 9, 10, 10, 9, 14, 2, 4, 8, 9, 10, 1, 1, …
## $ usia              <fct> 2, 3, 4, 1, 1, 4, 3, 2, 4, 3, 2, 3, 1, 1, 3, 3, 3, 2…
## $ pendidikan        <fct> 4, 2, 2, 4, 2, 4, 3, 4, 4, 2, 4, 4, 3, 2, 3, 4, 3, 3…
## $ jumlah.fashion    <dbl> 0.7832, 1.9242, 0.7848, 1.5896, 2.8040, 4.8048, 1.99…
## $ jumlah.footwear   <dbl> 1.2180, 0.0536, 0.4864, 2.9464, 9.1798, 0.7414, 3.88…
## $ jumlah.lainnya    <dbl> 0.7184, 1.7272, 0.4528, 1.1348, 2.1960, 0.9392, 2.81…
## $ total.nilai.tunai <dbl> 2.35, 0.24, 0.00, 0.00, 0.00, 4.89, 6.99, 2.35, 0.00…
## $ lama.member       <fct> 3, 2, 2, 4, 3, 2, 3, 3, 3, 4, 4, 1, 1, 1, 4, 2, 4, 4…
## $ promo             <fct> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
table(moklas3.jk.train$promo)
## 
##   0   1 
## 192  96
table(smote_train$promo)
## 
##   0   1 
## 192 288
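
SMOTE creates each synthetic minority row by interpolating between a minority case and one of its k nearest minority neighbours. A minimal sketch of that core step for the numeric columns only (a random minority row stands in for a true nearest neighbour here; DMwR also handles the factor columns, e.g. by copying a neighbour's level):

num_cols <- c("jumlah.fashion", "jumlah.footwear",
              "jumlah.lainnya", "total.nilai.tunai")
minority <- moklas3.jk.train[moklas3.jk.train$promo == "1", num_cols]
set.seed(16)
i  <- sample(nrow(minority), 1)                        # a minority case
nn <- sample(setdiff(seq_len(nrow(minority)), i), 1)   # stand-in neighbour
# x_new = x_i + u * (x_nn - x_i), u ~ Uniform(0, 1)
minority[i, ] + runif(1) * (minority[nn, ] - minority[i, ])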

RANDOM FOREST

## Set seed for reproducibility
set.seed(16)

## Define 5-fold cross-validation (method 'repeatedcv' with the default single repeat)
repeat_cv <- trainControl(method='repeatedcv', number=5)
## Set seed for reproducibility
set.seed(16)

## Train a random forest model
forest <- train(
        
        # Formula: use all variables to predict promo
        promo~., 
        
        # Source of data
        data=moklas3.jk.train, 
        
        # `rf` method for random forest
        method='rf', 
        
        # Add repeated cross validation as trControl
        trControl=repeat_cv,
        
        # Accuracy to measure the performance of the model
        metric='Accuracy')

## Print out the details about the model
forest$finalModel
## 
## Call:
##  randomForest(x = x, y = y, mtry = param$mtry) 
##                Type of random forest: classification
##                      Number of trees: 500
## No. of variables tried at each split: 2
## 
##         OOB estimate of  error rate: 28.47%
## Confusion matrix:
##     0  1 class.error
## 0 188  4  0.02083333
## 1  78 18  0.81250000
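
The OOB error rate is simply the misclassified share of the OOB confusion matrix; checking the arithmetic:

# (4 + 78) / 288 = 0.2847, matching the 28.47% reported above
(4 + 78) / (188 + 4 + 78 + 18)
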
#TRAINING DATA
confusionMatrix(forest$trainingData$.outcome,moklas3.jk.train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 192   0
##          1   0  96
##                                      
##                Accuracy : 1          
##                  95% CI : (0.9873, 1)
##     No Information Rate : 0.6667     
##     P-Value [Acc > NIR] : < 2.2e-16  
##                                      
##                   Kappa : 1          
##                                      
##  Mcnemar's Test P-Value : NA         
##                                      
##             Sensitivity : 1.0000     
##             Specificity : 1.0000     
##          Pos Pred Value : 1.0000     
##          Neg Pred Value : 1.0000     
##              Prevalence : 0.3333     
##          Detection Rate : 0.3333     
##    Detection Prevalence : 0.3333     
##       Balanced Accuracy : 1.0000     
##                                      
##        'Positive' Class : 1          
## 
## Generate predictions
y_hats <- predict(
        
        ## Random forest object
        object=forest, 
        
        ## Data to use for predictions; drop the response column (promo)
        newdata=moklas3.jk.test[, -9])
#TESTING DATA
confusionMatrix(y_hats,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 47 20
##          1  0  4
##                                          
##                Accuracy : 0.7183         
##                  95% CI : (0.599, 0.8187)
##     No Information Rate : 0.662          
##     P-Value [Acc > NIR] : 0.1908         
##                                          
##                   Kappa : 0.2094         
##                                          
##  Mcnemar's Test P-Value : 2.152e-05      
##                                          
##             Sensitivity : 0.16667        
##             Specificity : 1.00000        
##          Pos Pred Value : 1.00000        
##          Neg Pred Value : 0.70149        
##              Prevalence : 0.33803        
##          Detection Rate : 0.05634        
##    Detection Prevalence : 0.05634        
##       Balanced Accuracy : 0.58333        
##                                          
##        'Positive' Class : 1              
## 
library(plotly)
## 
## Attaching package: 'plotly'
## The following object is masked from 'package:xgboost':
## 
##     slice
## The following object is masked from 'package:MASS':
## 
##     select
## The following object is masked from 'package:ggplot2':
## 
##     last_plot
## The following object is masked from 'package:stats':
## 
##     filter
## The following object is masked from 'package:graphics':
## 
##     layout
set.seed(16)
RSS.test <- c()
ntreesVal = c(50, 100, 200, 300, 400, 500, 600, 700, 800, 900)
moklas3.jk.test$promo <-as.numeric(as.character(moklas3.jk.test$promo))

for (i in ntreesVal) {
    rf.model <- randomForest(promo ~ ., data = moklas3.jk.train, ntree = i)
    rf.test <- predict(rf.model, newdata = moklas3.jk.test[,-9])
    rf.test <-as.numeric(as.character(rf.test))
    RSS.rf <- sum((rf.test-moklas3.jk.test$promo)^2)
    RSS.test <- c(RSS.test, RSS.rf)
}

data <- data.frame(ntreesVal, RSS.test)

fig <- plot_ly(data, x = ~ntreesVal, y = ~RSS.test, type = "scatter", mode = "lines")
fig <- fig %>% layout(title = "Fine Tune 'ntree' In Random Forest", xaxis = list(title = "ntree"), 
    yaxis = list(title = "RSS on test set"))

fig
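
Because the labels are coded 0/1, each misclassified case contributes exactly (1 - 0)^2 = 1 to the RSS, so RSS here is the misclassification count and RSS/n is the test error rate:

# Test error rate for each ntree value tried above
RSS.test / nrow(moklas3.jk.test)
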
set.seed(16)
RSS.test <- c()
mtryVal = seq(1, 9, by = 1)
moklas3.jk.test$promo <-as.numeric(as.character(moklas3.jk.test$promo))

for (i in mtryVal) {
    rf.model <- randomForest(promo ~ ., data = moklas3.jk.train, ntree = 700, mtry = i)
    rf.test <- predict(rf.model, newdata = moklas3.jk.test[,-9])
    rf.test <-as.numeric(as.character(rf.test))
    RSS.rf <- sum((rf.test-moklas3.jk.test$promo)^2)
    RSS.test <- c(RSS.test, RSS.rf)
}
## Warning in randomForest.default(m, y, ...): invalid mtry: reset to within valid
## range
data <- data.frame(mtryVal, RSS.test)

fig <- plot_ly(data, x = ~mtryVal, y = ~RSS.test, type = "scatter", mode = "lines")
fig <- fig %>% layout(title = "Fine Tune 'mtry' In Random Forest", xaxis = list(title = "mtry"), 
    yaxis = list(title = "RSS on test set"))

fig
#Grid Search CV
control <- trainControl(method ="repeatedcv",
                        number = 10,
                        repeats = 3,
                        search = "grid")
set.seed(16)
tunegrid <- expand.grid(.mtry=c(1:10))
rf_gridsearch <- train(promo~.,
                       data = moklas3.jk.train,
                       method = "rf",
                       metric = "Accuracy",
                       tuneGrid = tunegrid,
                       trControl = control,
                       ntree=700)
rf_gridsearch
## Random Forest 
## 
## 288 samples
##   8 predictor
##   2 classes: '0', '1' 
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold, repeated 3 times) 
## Summary of sample sizes: 259, 260, 259, 260, 258, 260, ... 
## Resampling results across tuning parameters:
## 
##   mtry  Accuracy   Kappa    
##    1    0.6668172  0.0000000
##    2    0.7164340  0.2129387
##    3    0.7477641  0.3570924
##    4    0.7426081  0.3646666
##    5    0.7380405  0.3611552
##    6    0.7347510  0.3515912
##    7    0.7369704  0.3604339
##    8    0.7230104  0.3250457
##    9    0.7183279  0.3196494
##   10    0.7310974  0.3496945
## 
## Accuracy was used to select the optimal model using the largest value.
## The final value used for the model was mtry = 3.
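
caret can draw the resampling profile directly, which makes the flat accuracy around mtry = 3-7 easier to see (sketch):

plot(rf_gridsearch)          # resampled accuracy against mtry
rf_gridsearch$bestTune       # mtry = 3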

mtry=2

##Modeling Train Data (mtry=2)
library(randomForest)
set.seed(16)
rf_model1<-randomForest(promo~., data = moklas3.jk.train, mtry=2, ntree=700)
rf_model1
## 
## Call:
##  randomForest(formula = promo ~ ., data = moklas3.jk.train, mtry = 2,      ntree = 700) 
##                Type of random forest: classification
##                      Number of trees: 700
## No. of variables tried at each split: 2
## 
##         OOB estimate of  error rate: 25%
## Confusion matrix:
##     0  1 class.error
## 0 163 29   0.1510417
## 1  43 53   0.4479167
#TRAINING DATA
pred.rf1 <-as.factor(as.character(rf_model1$predicted))
moklas3.jk.train$promo <-as.factor(as.character(moklas3.jk.train$promo))
confusionMatrix(pred.rf1,moklas3.jk.train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 163  43
##          1  29  53
##                                           
##                Accuracy : 0.75            
##                  95% CI : (0.6958, 0.7989)
##     No Information Rate : 0.6667          
##     P-Value [Acc > NIR] : 0.001345        
##                                           
##                   Kappa : 0.4162          
##                                           
##  Mcnemar's Test P-Value : 0.125506        
##                                           
##             Sensitivity : 0.5521          
##             Specificity : 0.8490          
##          Pos Pred Value : 0.6463          
##          Neg Pred Value : 0.7913          
##              Prevalence : 0.3333          
##          Detection Rate : 0.1840          
##    Detection Prevalence : 0.2847          
##       Balanced Accuracy : 0.7005          
##                                           
##        'Positive' Class : 1               
## 
## Generate predictions
y_hats1 <- predict(
        
        ## Random forest object
        object=rf_model1, 
        
        ## Data to use for predictions; drop the response column (promo)
        newdata=moklas3.jk.test[, -9])
#TESTING DATA
y_hats1 <-as.factor(as.character(y_hats1))
moklas3.jk.test$promo <-as.factor(as.character(moklas3.jk.test$promo))
confusionMatrix(y_hats1,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 42 11
##          1  5 13
##                                         
##                Accuracy : 0.7746        
##                  95% CI : (0.66, 0.8654)
##     No Information Rate : 0.662         
##     P-Value [Acc > NIR] : 0.02706       
##                                         
##                   Kappa : 0.4636        
##                                         
##  Mcnemar's Test P-Value : 0.21130       
##                                         
##             Sensitivity : 0.5417        
##             Specificity : 0.8936        
##          Pos Pred Value : 0.7222        
##          Neg Pred Value : 0.7925        
##              Prevalence : 0.3380        
##          Detection Rate : 0.1831        
##    Detection Prevalence : 0.2535        
##       Balanced Accuracy : 0.7176        
##                                         
##        'Positive' Class : 1             
## 

mtry=3

##Modeling Train Data (mtry=3)
library(randomForest)
set.seed(16)
rf_model2<-randomForest(promo~., data = moklas3.jk.train, mtry=3, ntree=700)
rf_model2
## 
## Call:
##  randomForest(formula = promo ~ ., data = moklas3.jk.train, mtry = 3,      ntree = 700) 
##                Type of random forest: classification
##                      Number of trees: 700
## No. of variables tried at each split: 3
## 
##         OOB estimate of  error rate: 26.39%
## Confusion matrix:
##     0  1 class.error
## 0 160 32   0.1666667
## 1  44 52   0.4583333
#TRAINING DATA
pred.rf2 <-as.factor(as.character(rf_model2$predicted))
moklas3.jk.train$promo <-as.factor(as.character(moklas3.jk.train$promo))
confusionMatrix(pred.rf2,moklas3.jk.train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 160  44
##          1  32  52
##                                           
##                Accuracy : 0.7361          
##                  95% CI : (0.6812, 0.7861)
##     No Information Rate : 0.6667          
##     P-Value [Acc > NIR] : 0.006616        
##                                           
##                   Kappa : 0.3871          
##                                           
##  Mcnemar's Test P-Value : 0.207026        
##                                           
##             Sensitivity : 0.5417          
##             Specificity : 0.8333          
##          Pos Pred Value : 0.6190          
##          Neg Pred Value : 0.7843          
##              Prevalence : 0.3333          
##          Detection Rate : 0.1806          
##    Detection Prevalence : 0.2917          
##       Balanced Accuracy : 0.6875          
##                                           
##        'Positive' Class : 1               
## 
## Generate predictions
y_hats2 <- predict(
        
        ## Random forest object
        object=rf_model2, 
        
        ## Data to use for predictions; drop the response column (promo)
        newdata=moklas3.jk.test[, -9])
#TESTING DATA
y_hats2 <-as.factor(as.character(y_hats2))
moklas3.jk.test$promo <-as.factor(as.character(moklas3.jk.test$promo))
confusionMatrix(y_hats2,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 42 10
##          1  5 14
##                                           
##                Accuracy : 0.7887          
##                  95% CI : (0.6756, 0.8767)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.01404         
##                                           
##                   Kappa : 0.5026          
##                                           
##  Mcnemar's Test P-Value : 0.30170         
##                                           
##             Sensitivity : 0.5833          
##             Specificity : 0.8936          
##          Pos Pred Value : 0.7368          
##          Neg Pred Value : 0.8077          
##              Prevalence : 0.3380          
##          Detection Rate : 0.1972          
##    Detection Prevalence : 0.2676          
##       Balanced Accuracy : 0.7385          
##                                           
##        'Positive' Class : 1               
## 

mtry=4

##Modeling Train Data (mtry=4)
library(randomForest)
set.seed(16)
rf_model3<-randomForest(promo~., data = moklas3.jk.train, mtry=4, ntree=700)
rf_model3
## 
## Call:
##  randomForest(formula = promo ~ ., data = moklas3.jk.train, mtry = 4,      ntree = 700) 
##                Type of random forest: classification
##                      Number of trees: 700
## No. of variables tried at each split: 4
## 
##         OOB estimate of  error rate: 27.08%
## Confusion matrix:
##     0  1 class.error
## 0 159 33    0.171875
## 1  45 51    0.468750
#TRAINING DATA
pred.rf3 <-as.factor(as.character(rf_model3$predicted))
moklas3.jk.train$promo <-as.factor(as.character(moklas3.jk.train$promo))
confusionMatrix(pred.rf3,moklas3.jk.train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 159  45
##          1  33  51
##                                           
##                Accuracy : 0.7292          
##                  95% CI : (0.6739, 0.7796)
##     No Information Rate : 0.6667          
##     P-Value [Acc > NIR] : 0.01329         
##                                           
##                   Kappa : 0.371           
##                                           
##  Mcnemar's Test P-Value : 0.21295         
##                                           
##             Sensitivity : 0.5312          
##             Specificity : 0.8281          
##          Pos Pred Value : 0.6071          
##          Neg Pred Value : 0.7794          
##              Prevalence : 0.3333          
##          Detection Rate : 0.1771          
##    Detection Prevalence : 0.2917          
##       Balanced Accuracy : 0.6797          
##                                           
##        'Positive' Class : 1               
## 
## Generate predictions
y_hats3 <- predict(
        
        ## Random forest object
        object=rf_model3, 
        
        ## Data to use for predictions; drop the response column (promo)
        newdata=moklas3.jk.test[, -9])
#TESTING DATA
y_hats3 <-as.factor(as.character(y_hats3))
moklas3.jk.test$promo <-as.factor(as.character(moklas3.jk.test$promo))
confusionMatrix(y_hats3,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 40  9
##          1  7 15
##                                         
##                Accuracy : 0.7746        
##                  95% CI : (0.66, 0.8654)
##     No Information Rate : 0.662         
##     P-Value [Acc > NIR] : 0.02706       
##                                         
##                   Kappa : 0.486         
##                                         
##  Mcnemar's Test P-Value : 0.80259       
##                                         
##             Sensitivity : 0.6250        
##             Specificity : 0.8511        
##          Pos Pred Value : 0.6818        
##          Neg Pred Value : 0.8163        
##              Prevalence : 0.3380        
##          Detection Rate : 0.2113        
##    Detection Prevalence : 0.3099        
##       Balanced Accuracy : 0.7380        
##                                         
##        'Positive' Class : 1             
## 

mtry=9

##Modeling Train Data (mtry=9)
library(randomForest)
set.seed(16)
rf_model7<-randomForest(promo~., data = moklas3.jk.train, mtry=9, ntree=700)
## Warning in randomForest.default(m, y, ...): invalid mtry: reset to within valid
## range
rf_model7
## 
## Call:
##  randomForest(formula = promo ~ ., data = moklas3.jk.train, mtry = 9,      ntree = 700) 
##                Type of random forest: classification
##                      Number of trees: 700
## No. of variables tried at each split: 8
## 
##         OOB estimate of  error rate: 26.04%
## Confusion matrix:
##     0  1 class.error
## 0 161 31   0.1614583
## 1  44 52   0.4583333
#TRAINING DATA
pred.rf7 <-as.factor(as.character(rf_model7$predicted))
moklas3.jk.train$promo <-as.factor(as.character(moklas3.jk.train$promo))
confusionMatrix(pred.rf7,moklas3.jk.train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 159  45
##          1  33  51
##                                           
##                Accuracy : 0.7292          
##                  95% CI : (0.6739, 0.7796)
##     No Information Rate : 0.6667          
##     P-Value [Acc > NIR] : 0.01329         
##                                           
##                   Kappa : 0.371           
##                                           
##  Mcnemar's Test P-Value : 0.21295         
##                                           
##             Sensitivity : 0.5312          
##             Specificity : 0.8281          
##          Pos Pred Value : 0.6071          
##          Neg Pred Value : 0.7794          
##              Prevalence : 0.3333          
##          Detection Rate : 0.1771          
##    Detection Prevalence : 0.2917          
##       Balanced Accuracy : 0.6797          
##                                           
##        'Positive' Class : 1               
## 
## Generate predictions
y_hats7 <- predict(
        
        ## Random forest object
        object=rf_model7, 
        
        ## Data to use for predictions; drop the response column (promo)
        newdata=moklas3.jk.test[, -9])
#TESTING DATA
y_hats7 <-as.factor(as.character(y_hats7))
moklas3.jk.test$promo <-as.factor(as.character(moklas3.jk.test$promo))
confusionMatrix(y_hats7,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 41 10
##          1  6 14
##                                         
##                Accuracy : 0.7746        
##                  95% CI : (0.66, 0.8654)
##     No Information Rate : 0.662         
##     P-Value [Acc > NIR] : 0.02706       
##                                         
##                   Kappa : 0.475         
##                                         
##  Mcnemar's Test P-Value : 0.45325       
##                                         
##             Sensitivity : 0.5833        
##             Specificity : 0.8723        
##          Pos Pred Value : 0.7000        
##          Neg Pred Value : 0.8039        
##              Prevalence : 0.3380        
##          Detection Rate : 0.1972        
##    Detection Prevalence : 0.2817        
##       Balanced Accuracy : 0.7278        
##                                         
##        'Positive' Class : 1             
## 
varImpPlot(rf_model3)
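
The numeric values behind varImpPlot can be pulled out and sorted, which is handy for reporting (sketch):

imp <- randomForest::importance(rf_model3)
imp[order(imp[, "MeanDecreaseGini"], decreasing = TRUE), , drop = FALSE]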

RUS

## Set seed for reproducibility
set.seed(16)

## Train a random forest model
rf_model4 <- train(
        promo~., 
        data=down_train, 
        method='rf', 
        trControl=repeat_cv,
        metric='Accuracy')
## Print out the details about the model
rf_model4$finalModel
## 
## Call:
##  randomForest(x = x, y = y, mtry = param$mtry) 
##                Type of random forest: classification
##                      Number of trees: 500
## No. of variables tried at each split: 2
## 
##         OOB estimate of  error rate: 36.98%
## Confusion matrix:
##    0  1 class.error
## 0 63 33   0.3437500
## 1 38 58   0.3958333
#TRAINING DATA
confusionMatrix(rf_model4$trainingData$.outcome,down_train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 96  0
##          1  0 96
##                                     
##                Accuracy : 1         
##                  95% CI : (0.981, 1)
##     No Information Rate : 0.5       
##     P-Value [Acc > NIR] : < 2.2e-16 
##                                     
##                   Kappa : 1         
##                                     
##  Mcnemar's Test P-Value : NA        
##                                     
##             Sensitivity : 1.0       
##             Specificity : 1.0       
##          Pos Pred Value : 1.0       
##          Neg Pred Value : 1.0       
##              Prevalence : 0.5       
##          Detection Rate : 0.5       
##    Detection Prevalence : 0.5       
##       Balanced Accuracy : 1.0       
##                                     
##        'Positive' Class : 1         
## 
## Generate predictions
y_hats4 <- predict(
        
        ## Random forest object
        object=rf_model4, 
        
        ## Data to use for predictions; drop the response column (promo)
        newdata=moklas3.jk.test[, -9])
#TESTING DATA
y_hats4 <-as.factor(as.character(y_hats4))
moklas3.jk.test$promo <-as.factor(as.character(moklas3.jk.test$promo))
confusionMatrix(y_hats4,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 35  8
##          1 12 16
##                                          
##                Accuracy : 0.7183         
##                  95% CI : (0.599, 0.8187)
##     No Information Rate : 0.662          
##     P-Value [Acc > NIR] : 0.1908         
##                                          
##                   Kappa : 0.3952         
##                                          
##  Mcnemar's Test P-Value : 0.5023         
##                                          
##             Sensitivity : 0.6667         
##             Specificity : 0.7447         
##          Pos Pred Value : 0.5714         
##          Neg Pred Value : 0.8140         
##              Prevalence : 0.3380         
##          Detection Rate : 0.2254         
##    Detection Prevalence : 0.3944         
##       Balanced Accuracy : 0.7057         
##                                          
##        'Positive' Class : 1              
## 

ROS

## Set seed for reproducibility
set.seed(16)

## Train a random forest model
rf_model5 <- train(
        promo~., 
        data=up_train, 
        method='rf', 
        trControl=repeat_cv,
        metric='Accuracy')
## Print out the details about the model
rf_model5$finalModel
## 
## Call:
##  randomForest(x = x, y = y, mtry = param$mtry) 
##                Type of random forest: classification
##                      Number of trees: 500
## No. of variables tried at each split: 14
## 
##         OOB estimate of  error rate: 12.76%
## Confusion matrix:
##     0   1 class.error
## 0 158  34   0.1770833
## 1  15 177   0.0781250
#TRAINING DATA
confusionMatrix(rf_model5$trainingData$.outcome,up_train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 192   0
##          1   0 192
##                                      
##                Accuracy : 1          
##                  95% CI : (0.9904, 1)
##     No Information Rate : 0.5        
##     P-Value [Acc > NIR] : < 2.2e-16  
##                                      
##                   Kappa : 1          
##                                      
##  Mcnemar's Test P-Value : NA         
##                                      
##             Sensitivity : 1.0        
##             Specificity : 1.0        
##          Pos Pred Value : 1.0        
##          Neg Pred Value : 1.0        
##              Prevalence : 0.5        
##          Detection Rate : 0.5        
##    Detection Prevalence : 0.5        
##       Balanced Accuracy : 1.0        
##                                      
##        'Positive' Class : 1          
## 
## Generate predictions
y_hats5 <- predict(
        
        ## Random forest object
        object=rf_model5, 
        
        ## Data to use for predictions; drop the response column (promo)
        newdata=moklas3.jk.test[, -9])
#TESTING DATA
y_hats5 <-as.factor(as.character(y_hats5))
moklas3.jk.test$promo <-as.factor(as.character(moklas3.jk.test$promo))
confusionMatrix(y_hats5,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 40 12
##          1  7 12
##                                           
##                Accuracy : 0.7324          
##                  95% CI : (0.6141, 0.8306)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.1285          
##                                           
##                   Kappa : 0.3699          
##                                           
##  Mcnemar's Test P-Value : 0.3588          
##                                           
##             Sensitivity : 0.5000          
##             Specificity : 0.8511          
##          Pos Pred Value : 0.6316          
##          Neg Pred Value : 0.7692          
##              Prevalence : 0.3380          
##          Detection Rate : 0.1690          
##    Detection Prevalence : 0.2676          
##       Balanced Accuracy : 0.6755          
##                                           
##        'Positive' Class : 1               
## 

SMOTE

library(randomForest)
set.seed(16)
rf_model6<-randomForest(promo~., data = smote_train, mtry=4, ntree=700)
rf_model6
## 
## Call:
##  randomForest(formula = promo ~ ., data = smote_train, mtry = 4,      ntree = 700) 
##                Type of random forest: classification
##                      Number of trees: 700
## No. of variables tried at each split: 4
## 
##         OOB estimate of  error rate: 10.21%
## Confusion matrix:
##     0   1 class.error
## 0 165  27  0.14062500
## 1  22 266  0.07638889
#TRAINING DATA
pred.rf6 <-as.factor(as.character(rf_model6$predicted))
smote_train$promo <-as.factor(as.character(smote_train$promo))
confusionMatrix(pred.rf6,smote_train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 165  22
##          1  27 266
##                                           
##                Accuracy : 0.8979          
##                  95% CI : (0.8673, 0.9235)
##     No Information Rate : 0.6             
##     P-Value [Acc > NIR] : <2e-16          
##                                           
##                   Kappa : 0.7864          
##                                           
##  Mcnemar's Test P-Value : 0.5677          
##                                           
##             Sensitivity : 0.9236          
##             Specificity : 0.8594          
##          Pos Pred Value : 0.9078          
##          Neg Pred Value : 0.8824          
##              Prevalence : 0.6000          
##          Detection Rate : 0.5542          
##    Detection Prevalence : 0.6104          
##       Balanced Accuracy : 0.8915          
##                                           
##        'Positive' Class : 1               
## 
## Generate predictions
y_hats6 <- predict(
        
        ## Random forest object
        object=rf_model6, 
        
        ## Data to use for predictions; drop the response column (promo)
        newdata=moklas3.jk.test[, -9])
#TESTING DATA
y_hats6 <-as.factor(as.character(y_hats6))
moklas3.jk.test$promo <-as.factor(as.character(moklas3.jk.test$promo))
confusionMatrix(y_hats6,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 31  5
##          1 16 19
##                                           
##                Accuracy : 0.7042          
##                  95% CI : (0.5841, 0.8067)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.2681          
##                                           
##                   Kappa : 0.4057          
##                                           
##  Mcnemar's Test P-Value : 0.0291          
##                                           
##             Sensitivity : 0.7917          
##             Specificity : 0.6596          
##          Pos Pred Value : 0.5429          
##          Neg Pred Value : 0.8611          
##              Prevalence : 0.3380          
##          Detection Rate : 0.2676          
##    Detection Prevalence : 0.4930          
##       Balanced Accuracy : 0.7256          
##                                           
##        'Positive' Class : 1               
## 

EXTRA TREES

## Set seed for reproducibility
set.seed(16)

## Define 5-fold cross-validation (method 'repeatedcv' with the default single repeat)
repeat_cv <- trainControl(method='repeatedcv', number=5)

## Set seed for reproducibility
set.seed(16)

## Train an extremely randomized trees (extra trees) model
extraT <- train(
  
  # Formula: use all variables to predict promo
  promo~.,
  tuneGrid = expand.grid(mtry = 1:10,
                         splitrule = "extratrees",
                         min.node.size = 10),
  
  # Source of data
  data=moklas3.jk.train, 
  
  # `ranger` method, which implements the extratrees splitrule
  method='ranger', 
  
  # Add the cross-validation scheme as trControl
  trControl=repeat_cv
  )

## Print out the details about the model
extraT$finalModel
## Ranger result
## 
## Call:
##  ranger::ranger(dependent.variable.name = ".outcome", data = x,      mtry = min(param$mtry, ncol(x)), min.node.size = param$min.node.size,      splitrule = as.character(param$splitrule), write.forest = TRUE,      probability = classProbs, ...) 
## 
## Type:                             Classification 
## Number of trees:                  500 
## Sample size:                      288 
## Number of independent variables:  26 
## Mtry:                             4 
## Target node size:                 10 
## Variable importance mode:         none 
## Splitrule:                        extratrees 
## Number of random splits:          1 
## OOB prediction error:             23.96 %
extraT$trainingData
extraT$bestTune
confusionMatrix(extraT$trainingData$.outcome,moklas3.jk.train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 192   0
##          1   0  96
##                                      
##                Accuracy : 1          
##                  95% CI : (0.9873, 1)
##     No Information Rate : 0.6667     
##     P-Value [Acc > NIR] : < 2.2e-16  
##                                      
##                   Kappa : 1          
##                                      
##  Mcnemar's Test P-Value : NA         
##                                      
##             Sensitivity : 1.0000     
##             Specificity : 1.0000     
##          Pos Pred Value : 1.0000     
##          Neg Pred Value : 1.0000     
##              Prevalence : 0.3333     
##          Detection Rate : 0.3333     
##    Detection Prevalence : 0.3333     
##       Balanced Accuracy : 1.0000     
##                                      
##        'Positive' Class : 1          
## 
## Generate predictions
y_hats_e1 <- predict(
  
  ## Random forest object
  object=extraT, 
  
  ## Data to use for predictions; drop the response column (promo)
  newdata=moklas3.jk.test[, -9])
confusionMatrix(y_hats_e1,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 46 13
##          1  1 11
##                                           
##                Accuracy : 0.8028          
##                  95% CI : (0.6914, 0.8878)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.006749        
##                                           
##                   Kappa : 0.498           
##                                           
##  Mcnemar's Test P-Value : 0.003283        
##                                           
##             Sensitivity : 0.4583          
##             Specificity : 0.9787          
##          Pos Pred Value : 0.9167          
##          Neg Pred Value : 0.7797          
##              Prevalence : 0.3380          
##          Detection Rate : 0.1549          
##    Detection Prevalence : 0.1690          
##       Balanced Accuracy : 0.7185          
##                                           
##        'Positive' Class : 1               
## 
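
The cross-validated tuning profile is worth a quick look before refitting; a
minimal sketch using only the `extraT` object fitted above:

## CV accuracy and kappa for each candidate mtry
extraT$results[, c("mtry", "Accuracy", "Kappa")]
## Accuracy-vs-mtry profile from the resampling results
plot(extraT, main = "5-Fold CV Extra Trees: accuracy vs. mtry")
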
## Set seed for reproducibility
set.seed(16)

## Define cross-validation with 5 folds (repeats defaults to 1 here)
repeat_cv <- trainControl(method='repeatedcv', number=5)
extra.best<-extraT$bestTune

## Set seed for reproducibility
set.seed(16)

## Refit the extra-trees model with the selected tuning parameters
extraT.g <- train(
  
  # Formula: use all remaining variables to predict promo
  promo~.,
  tuneGrid = extra.best,
  data=moklas3.jk.train, 
  method='ranger', 
  trControl=repeat_cv
  )

## Print out the details about the model
extraT.g$finalModel
## Ranger result
## 
## Call:
##  ranger::ranger(dependent.variable.name = ".outcome", data = x,      mtry = min(param$mtry, ncol(x)), min.node.size = param$min.node.size,      splitrule = as.character(param$splitrule), write.forest = TRUE,      probability = classProbs, ...) 
## 
## Type:                             Classification 
## Number of trees:                  500 
## Sample size:                      288 
## Number of independent variables:  26 
## Mtry:                             4 
## Target node size:                 10 
## Variable importance mode:         none 
## Splitrule:                        extratrees 
## Number of random splits:          1 
## OOB prediction error:             26.39 %
confusionMatrix(extraT.g$trainingData$.outcome,moklas3.jk.train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 192   0
##          1   0  96
##                                      
##                Accuracy : 1          
##                  95% CI : (0.9873, 1)
##     No Information Rate : 0.6667     
##     P-Value [Acc > NIR] : < 2.2e-16  
##                                      
##                   Kappa : 1          
##                                      
##  Mcnemar's Test P-Value : NA         
##                                      
##             Sensitivity : 1.0000     
##             Specificity : 1.0000     
##          Pos Pred Value : 1.0000     
##          Neg Pred Value : 1.0000     
##              Prevalence : 0.3333     
##          Detection Rate : 0.3333     
##    Detection Prevalence : 0.3333     
##       Balanced Accuracy : 1.0000     
##                                      
##        'Positive' Class : 1          
## 
## Generate predictions
y_hats_e1.g <- predict(
  
  ## Fitted extra-trees object
  object=extraT.g, 
  
  ## Data to use for predictions; drop the outcome column (promo, column 9)
  newdata=moklas3.jk.test[, -9])
confusionMatrix(y_hats_e1.g,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 46 13
##          1  1 11
##                                           
##                Accuracy : 0.8028          
##                  95% CI : (0.6914, 0.8878)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.006749        
##                                           
##                   Kappa : 0.498           
##                                           
##  Mcnemar's Test P-Value : 0.003283        
##                                           
##             Sensitivity : 0.4583          
##             Specificity : 0.9787          
##          Pos Pred Value : 0.9167          
##          Neg Pred Value : 0.7797          
##              Prevalence : 0.3380          
##          Detection Rate : 0.1549          
##    Detection Prevalence : 0.1690          
##       Balanced Accuracy : 0.7185          
##                                           
##        'Positive' Class : 1               
## 
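
Accuracy alone hides the sensitivity/specificity trade-off, so an ROC curve is
a useful complement. A hedged sketch follows: caret only returns class
probabilities when classProbs = TRUE and the class levels are valid R names,
so the outcome is recoded first (train2, ctrl_p and fit_p are illustrative
names, not objects from the original analysis).

## Recode the 0/1 levels to valid R names, as caret requires for classProbs
train2 <- moklas3.jk.train
levels(train2$promo) <- make.names(levels(train2$promo))  # "X0", "X1"
ctrl_p <- trainControl(method = "repeatedcv", number = 5,
                       classProbs = TRUE, summaryFunction = twoClassSummary)
set.seed(16)
fit_p <- train(promo ~ ., data = train2, method = "ranger",
               tuneGrid = extra.best, trControl = ctrl_p, metric = "ROC")
## Probability of the positive class on the held-out set
probs <- predict(fit_p, newdata = moklas3.jk.test[, -9], type = "prob")
pr  <- ROCR::prediction(probs[, "X1"], moklas3.jk.test$promo)
auc <- ROCR::performance(pr, "auc")@y.values[[1]]
plot(ROCR::performance(pr, "tpr", "fpr"),
     main = sprintf("Extra Trees ROC (AUC = %.3f)", auc))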

RUS (RANDOM UNDERSAMPLING)

## Set seed for reproducibility
set.seed(16)
repeat_cv <- trainControl(method='repeatedcv', number=5)

set.seed(16)
extraT1 <- train(
  promo~.,
  tuneGrid = data.frame(mtry = 1:10,
                        splitrule = rep("extratrees", 10),
                        min.node.size = 10L),
  data=down_train, 
  method='ranger', 
  trControl=repeat_cv
  )
## Warning in model.matrix.default(Terms, m, contrasts): the response appeared on
## the right-hand side and was dropped
## Warning in model.matrix.default(Terms, m, contrasts): problem with term 9 in
## model.matrix: no columns are assigned
## Print out the details about the model
extraT1$finalModel
## Ranger result
## 
## Call:
##  ranger::ranger(dependent.variable.name = ".outcome", data = x,      mtry = min(param$mtry, ncol(x)), min.node.size = param$min.node.size,      splitrule = as.character(param$splitrule), write.forest = TRUE,      probability = classProbs, ...) 
## 
## Type:                             Classification 
## Number of trees:                  500 
## Sample size:                      192 
## Number of independent variables:  26 
## Mtry:                             8 
## Target node size:                 10 
## Variable importance mode:         none 
## Splitrule:                        extratrees 
## Number of random splits:          1 
## OOB prediction error:             33.33 %
confusionMatrix(extraT1$trainingData$.outcome,down_train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 96  0
##          1  0 96
##                                     
##                Accuracy : 1         
##                  95% CI : (0.981, 1)
##     No Information Rate : 0.5       
##     P-Value [Acc > NIR] : < 2.2e-16 
##                                     
##                   Kappa : 1         
##                                     
##  Mcnemar's Test P-Value : NA        
##                                     
##             Sensitivity : 1.0       
##             Specificity : 1.0       
##          Pos Pred Value : 1.0       
##          Neg Pred Value : 1.0       
##              Prevalence : 0.5       
##          Detection Rate : 0.5       
##    Detection Prevalence : 0.5       
##       Balanced Accuracy : 1.0       
##                                     
##        'Positive' Class : 1         
## 
## Generate predictions
y_hats_e2 <- predict(
  
  ## Fitted extra-trees object
  object=extraT1, 
  
  ## Data to use for predictions; drop the outcome column (promo, column 9)
  newdata=moklas3.jk.test[, -9])
confusionMatrix(y_hats_e2,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 40  8
##          1  7 16
##                                           
##                Accuracy : 0.7887          
##                  95% CI : (0.6756, 0.8767)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.01404         
##                                           
##                   Kappa : 0.5231          
##                                           
##  Mcnemar's Test P-Value : 1.00000         
##                                           
##             Sensitivity : 0.6667          
##             Specificity : 0.8511          
##          Pos Pred Value : 0.6957          
##          Neg Pred Value : 0.8333          
##              Prevalence : 0.3380          
##          Detection Rate : 0.2254          
##    Detection Prevalence : 0.3239          
##       Balanced Accuracy : 0.7589          
##                                           
##        'Positive' Class : 1               
## 
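
For reference, balanced training sets like down_train and up_train (created
earlier in the document) can be produced with caret's downSample() and
upSample(); a sketch, assuming promo is column 9 as in the predict() calls
above (down_ref and up_ref are illustrative names):

set.seed(16)
down_ref <- downSample(x = moklas3.jk.train[, -9],
                       y = moklas3.jk.train$promo, yname = "promo")
up_ref   <- upSample(x = moklas3.jk.train[, -9],
                     y = moklas3.jk.train$promo, yname = "promo")
table(down_ref$promo)
table(up_ref$promo)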

ROS (RANDOM OVERSAMPLING)

## Set seed for reproducibility
set.seed(16)
repeat_cv <- trainControl(method='repeatedcv', number=5)

set.seed(16)
extraT2 <- train(
  promo~.,
  tuneGrid = data.frame(mtry = 1:10,
                        splitrule = rep("extratrees", 10),
                        min.node.size = 10L),
  data=up_train, 
  method='ranger', 
  trControl=repeat_cv
  )
## Warning in model.matrix.default(Terms, m, contrasts): the response appeared on
## the right-hand side and was dropped
## Warning in model.matrix.default(Terms, m, contrasts): problem with term 9 in
## model.matrix: no columns are assigned
## Print out the details about the model
extraT2$finalModel
## Ranger result
## 
## Call:
##  ranger::ranger(dependent.variable.name = ".outcome", data = x,      mtry = min(param$mtry, ncol(x)), min.node.size = param$min.node.size,      splitrule = as.character(param$splitrule), write.forest = TRUE,      probability = classProbs, ...) 
## 
## Type:                             Classification 
## Number of trees:                  500 
## Sample size:                      384 
## Number of independent variables:  26 
## Mtry:                             6 
## Target node size:                 10 
## Variable importance mode:         none 
## Splitrule:                        extratrees 
## Number of random splits:          1 
## OOB prediction error:             16.93 %
confusionMatrix(extraT2$trainingData$.outcome,up_train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 192   0
##          1   0 192
##                                      
##                Accuracy : 1          
##                  95% CI : (0.9904, 1)
##     No Information Rate : 0.5        
##     P-Value [Acc > NIR] : < 2.2e-16  
##                                      
##                   Kappa : 1          
##                                      
##  Mcnemar's Test P-Value : NA         
##                                      
##             Sensitivity : 1.0        
##             Specificity : 1.0        
##          Pos Pred Value : 1.0        
##          Neg Pred Value : 1.0        
##              Prevalence : 0.5        
##          Detection Rate : 0.5        
##    Detection Prevalence : 0.5        
##       Balanced Accuracy : 1.0        
##                                      
##        'Positive' Class : 1          
## 
## Generate predictions
y_hats_e3 <- predict(
  
  ## Fitted extra-trees object
  object=extraT2, 
  
  ## Data to use for predictions; drop the outcome column (promo, column 9)
  newdata=moklas3.jk.test[, -9])
confusionMatrix(y_hats_e3,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 44 11
##          1  3 13
##                                           
##                Accuracy : 0.8028          
##                  95% CI : (0.6914, 0.8878)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.006749        
##                                           
##                   Kappa : 0.5203          
##                                           
##  Mcnemar's Test P-Value : 0.061369        
##                                           
##             Sensitivity : 0.5417          
##             Specificity : 0.9362          
##          Pos Pred Value : 0.8125          
##          Neg Pred Value : 0.8000          
##              Prevalence : 0.3380          
##          Detection Rate : 0.1831          
##    Detection Prevalence : 0.2254          
##       Balanced Accuracy : 0.7389          
##                                           
##        'Positive' Class : 1               
## 

SMOTE (SYNTHETIC MINORITY OVERSAMPLING)

## Set seed for reproducibility
set.seed(16)
repeat_cv <- trainControl(method='repeatedcv', number=5)

set.seed(16)
extraT3 <- train(
  promo~.,
  tuneGrid = data.frame(mtry = 1:10,
                        splitrule = rep("extratrees", 10),
                        min.node.size = 10L),
  data=smote_train, 
  method='ranger', 
  trControl=repeat_cv
  )

## Print out the details about the model
extraT3$finalModel
## Ranger result
## 
## Call:
##  ranger::ranger(dependent.variable.name = ".outcome", data = x,      mtry = min(param$mtry, ncol(x)), min.node.size = param$min.node.size,      splitrule = as.character(param$splitrule), write.forest = TRUE,      probability = classProbs, ...) 
## 
## Type:                             Classification 
## Number of trees:                  500 
## Sample size:                      480 
## Number of independent variables:  26 
## Mtry:                             10 
## Target node size:                 10 
## Variable importance mode:         none 
## Splitrule:                        extratrees 
## Number of random splits:          1 
## OOB prediction error:             19.58 %
confusionMatrix(extraT3$trainingData$.outcome,smote_train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 192   0
##          1   0 288
##                                      
##                Accuracy : 1          
##                  95% CI : (0.9923, 1)
##     No Information Rate : 0.6        
##     P-Value [Acc > NIR] : < 2.2e-16  
##                                      
##                   Kappa : 1          
##                                      
##  Mcnemar's Test P-Value : NA         
##                                      
##             Sensitivity : 1.0        
##             Specificity : 1.0        
##          Pos Pred Value : 1.0        
##          Neg Pred Value : 1.0        
##              Prevalence : 0.6        
##          Detection Rate : 0.6        
##    Detection Prevalence : 0.6        
##       Balanced Accuracy : 1.0        
##                                      
##        'Positive' Class : 1          
## 
## Generate predictions
y_hats_e4 <- predict(
  
  ## Fitted extra-trees object
  object=extraT3, 
  
  ## Data to use for predictions; drop the outcome column (promo, column 9)
  newdata=moklas3.jk.test[, -9])
confusionMatrix(y_hats_e4,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 34  6
##          1 13 18
##                                           
##                Accuracy : 0.7324          
##                  95% CI : (0.6141, 0.8306)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.1285          
##                                           
##                   Kappa : 0.4419          
##                                           
##  Mcnemar's Test P-Value : 0.1687          
##                                           
##             Sensitivity : 0.7500          
##             Specificity : 0.7234          
##          Pos Pred Value : 0.5806          
##          Neg Pred Value : 0.8500          
##              Prevalence : 0.3380          
##          Detection Rate : 0.2535          
##    Detection Prevalence : 0.4366          
##       Balanced Accuracy : 0.7367          
##                                           
##        'Positive' Class : 1               
## 
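
With all four extra-trees variants scored on the same held-out set, the test
metrics are easier to read side by side. A small helper sketch
(collect_metrics is an illustrative name, not part of the original analysis):

## Pull the headline metrics out of a confusionMatrix object
collect_metrics <- function(preds, truth = moklas3.jk.test$promo) {
  cm <- confusionMatrix(preds, truth, positive = "1")
  c(cm$overall[c("Accuracy", "Kappa")],
    cm$byClass[c("Sensitivity", "Specificity", "Balanced Accuracy")])
}
round(rbind(None  = collect_metrics(y_hats_e1),
            RUS   = collect_metrics(y_hats_e2),
            ROS   = collect_metrics(y_hats_e3),
            SMOTE = collect_metrics(y_hats_e4)), 4)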

GRADIENT BOOSTING

## Set seed for reproducibility
set.seed(16)

## Define cross-validation with 5 folds (repeats defaults to 1 here)
repeat_cv <- trainControl(method='repeatedcv', number=5)
boost_1 <- train(promo ~., 
               data=moklas3.jk.train, 
               method="gbm",
               tuneLength = 5,  
               trControl=repeat_cv,
               verbose = FALSE)
plot(boost_1, main = "5-Fold Cross Validation Gradient Boosting: tuneLength")

boost_best <- boost_1$bestTune
## Refit with the selected tuning parameters (note: no trControl is supplied
## here, so caret falls back to its default bootstrap resampling)
boost_1 <- train(promo ~., 
               data=moklas3.jk.train, 
               method="gbm",
               tuneGrid  = boost_best,
               verbose = FALSE)
boost_result_1 <- boost_1$results
boost_result_1
boost_1$trainingData
confusionMatrix(boost_1$trainingData$.outcome,moklas3.jk.train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 192   0
##          1   0  96
##                                      
##                Accuracy : 1          
##                  95% CI : (0.9873, 1)
##     No Information Rate : 0.6667     
##     P-Value [Acc > NIR] : < 2.2e-16  
##                                      
##                   Kappa : 1          
##                                      
##  Mcnemar's Test P-Value : NA         
##                                      
##             Sensitivity : 1.0000     
##             Specificity : 1.0000     
##          Pos Pred Value : 1.0000     
##          Neg Pred Value : 1.0000     
##              Prevalence : 0.3333     
##          Detection Rate : 0.3333     
##    Detection Prevalence : 0.3333     
##       Balanced Accuracy : 1.0000     
##                                      
##        'Positive' Class : 1          
## 
## Generate predictions
y_hats_b1 <- predict(
  
  ## Fitted gbm object
  object=boost_1, 
  
  ## Data to use for predictions; drop the outcome column (promo, column 9)
  newdata=moklas3.jk.test[, -9])
confusionMatrix(y_hats_b1,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 44 12
##          1  3 12
##                                           
##                Accuracy : 0.7887          
##                  95% CI : (0.6756, 0.8767)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.01404         
##                                           
##                   Kappa : 0.4802          
##                                           
##  Mcnemar's Test P-Value : 0.03887         
##                                           
##             Sensitivity : 0.5000          
##             Specificity : 0.9362          
##          Pos Pred Value : 0.8000          
##          Neg Pred Value : 0.7857          
##              Prevalence : 0.3380          
##          Detection Rate : 0.1690          
##    Detection Prevalence : 0.2113          
##       Balanced Accuracy : 0.7181          
##                                           
##        'Positive' Class : 1               
## 
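
It is also worth checking which predictors drive the boosted model; caret's
varImp() wraps gbm's relative-influence measure (a sketch over the boost_1
object fitted above):

gbm_imp <- varImp(boost_1)
plot(gbm_imp, top = 10, main = "GBM: top 10 predictors by relative influence")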

RUS

boost_2 <- train(promo ~., 
               data=down_train, 
               method="gbm",
               tuneLength = 5,  
               trControl=repeat_cv,
               verbose = FALSE)
## Warning in model.matrix.default(Terms, m, contrasts): the response appeared on
## the right-hand side and was dropped
## Warning in model.matrix.default(Terms, m, contrasts): problem with term 9 in
## model.matrix: no columns are assigned
confusionMatrix(boost_2$trainingData$.outcome,down_train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 96  0
##          1  0 96
##                                     
##                Accuracy : 1         
##                  95% CI : (0.981, 1)
##     No Information Rate : 0.5       
##     P-Value [Acc > NIR] : < 2.2e-16 
##                                     
##                   Kappa : 1         
##                                     
##  Mcnemar's Test P-Value : NA        
##                                     
##             Sensitivity : 1.0       
##             Specificity : 1.0       
##          Pos Pred Value : 1.0       
##          Neg Pred Value : 1.0       
##              Prevalence : 0.5       
##          Detection Rate : 0.5       
##    Detection Prevalence : 0.5       
##       Balanced Accuracy : 1.0       
##                                     
##        'Positive' Class : 1         
## 
## Generate predictions
y_hats_b2 <- predict(
  
  ## Fitted gbm object
  object=boost_2, 
  
  ## Data to use for predictions; drop the outcome column (promo, column 9)
  newdata=moklas3.jk.test[, -9])
confusionMatrix(y_hats_b2,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 32  9
##          1 15 15
##                                         
##                Accuracy : 0.662         
##                  95% CI : (0.5399, 0.77)
##     No Information Rate : 0.662         
##     P-Value [Acc > NIR] : 0.5552        
##                                         
##                   Kappa : 0.2882        
##                                         
##  Mcnemar's Test P-Value : 0.3074        
##                                         
##             Sensitivity : 0.6250        
##             Specificity : 0.6809        
##          Pos Pred Value : 0.5000        
##          Neg Pred Value : 0.7805        
##              Prevalence : 0.3380        
##          Detection Rate : 0.2113        
##    Detection Prevalence : 0.4225        
##       Balanced Accuracy : 0.6529        
##                                         
##        'Positive' Class : 1             
## 

ROS

boost_3 <- train(promo ~., 
               data=up_train, 
               method="gbm",
               tuneLength = 5,  
               trControl=repeat_cv,
               verbose = FALSE)
## Warning in model.matrix.default(Terms, m, contrasts): the response appeared on
## the right-hand side and was dropped
## Warning in model.matrix.default(Terms, m, contrasts): problem with term 9 in
## model.matrix: no columns are assigned
confusionMatrix(boost_3$trainingData$.outcome,up_train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 192   0
##          1   0 192
##                                      
##                Accuracy : 1          
##                  95% CI : (0.9904, 1)
##     No Information Rate : 0.5        
##     P-Value [Acc > NIR] : < 2.2e-16  
##                                      
##                   Kappa : 1          
##                                      
##  Mcnemar's Test P-Value : NA         
##                                      
##             Sensitivity : 1.0        
##             Specificity : 1.0        
##          Pos Pred Value : 1.0        
##          Neg Pred Value : 1.0        
##              Prevalence : 0.5        
##          Detection Rate : 0.5        
##    Detection Prevalence : 0.5        
##       Balanced Accuracy : 1.0        
##                                      
##        'Positive' Class : 1          
## 
## Generate predictions
y_hats_b3 <- predict(
  
  ## Fitted gbm object
  object=boost_3, 
  
  ## Data to use for predictions; drop the outcome column (promo, column 9)
  newdata=moklas3.jk.test[, -9])
confusionMatrix(y_hats_b3,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 42 13
##          1  5 11
##                                           
##                Accuracy : 0.7465          
##                  95% CI : (0.6292, 0.8423)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.08154         
##                                           
##                   Kappa : 0.3832          
##                                           
##  Mcnemar's Test P-Value : 0.09896         
##                                           
##             Sensitivity : 0.4583          
##             Specificity : 0.8936          
##          Pos Pred Value : 0.6875          
##          Neg Pred Value : 0.7636          
##              Prevalence : 0.3380          
##          Detection Rate : 0.1549          
##    Detection Prevalence : 0.2254          
##       Balanced Accuracy : 0.6760          
##                                           
##        'Positive' Class : 1               
## 

SMOTE

boost_4 <- train(promo ~., 
               data=smote_train, 
               method="gbm",
               tuneLength = 5,  
               trControl=repeat_cv,
               verbose = FALSE)
confusionMatrix(boost_4$trainingData$.outcome,smote_train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 192   0
##          1   0 288
##                                      
##                Accuracy : 1          
##                  95% CI : (0.9923, 1)
##     No Information Rate : 0.6        
##     P-Value [Acc > NIR] : < 2.2e-16  
##                                      
##                   Kappa : 1          
##                                      
##  Mcnemar's Test P-Value : NA         
##                                      
##             Sensitivity : 1.0        
##             Specificity : 1.0        
##          Pos Pred Value : 1.0        
##          Neg Pred Value : 1.0        
##              Prevalence : 0.6        
##          Detection Rate : 0.6        
##    Detection Prevalence : 0.6        
##       Balanced Accuracy : 1.0        
##                                      
##        'Positive' Class : 1          
## 
## Generate predictions
y_hats_b4 <- predict(
  
  ## Fitted gbm object
  object=boost_4, 
  
  ## Data to use for predictions; drop the outcome column (promo, column 9)
  newdata=moklas3.jk.test[, -9])
confusionMatrix(y_hats_b4,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 35  9
##          1 12 15
##                                           
##                Accuracy : 0.7042          
##                  95% CI : (0.5841, 0.8067)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.2681          
##                                           
##                   Kappa : 0.3587          
##                                           
##  Mcnemar's Test P-Value : 0.6625          
##                                           
##             Sensitivity : 0.6250          
##             Specificity : 0.7447          
##          Pos Pred Value : 0.5556          
##          Neg Pred Value : 0.7955          
##              Prevalence : 0.3380          
##          Detection Rate : 0.2113          
##    Detection Prevalence : 0.3803          
##       Balanced Accuracy : 0.6848          
##                                           
##        'Positive' Class : 1               
## 
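
Reusing the collect_metrics() helper sketched after the extra-trees models,
the four gbm variants can be compared on the held-out set in the same way:

round(rbind(None  = collect_metrics(y_hats_b1),
            RUS   = collect_metrics(y_hats_b2),
            ROS   = collect_metrics(y_hats_b3),
            SMOTE = collect_metrics(y_hats_b4)), 4)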

EXTREME GRADIENT BOOSTING (XGBOOST)

# Basic Parameter Tuning
fitControl <- trainControl(## 5-fold CV
                           method = "repeatedcv",
                           number = 5,
                           ## repeated five times
                           repeats = 5)

# Alternate Tuning Grids
xgbGrid <-  expand.grid(nrounds = c(300, 500, 1000, 1500),
                        max_depth = 2,
                        eta = c(0.01, 0.02, 0.03),
                        gamma = 0,
                        colsample_bytree = 1,
                        min_child_weight = 1,
                        subsample = 1
                        )

set.seed(16)
## Note: caret's "xgbTree" sets the classification objective internally, so
## the extra `objective` argument below is redundant (and reg:squarederror is
## a regression objective anyway); it is what triggers the repeated
## "provided multiple times" warnings in the output that follows.
xgbFit <- train(promo~ ., data = moklas3.jk.train, 
                 method = "xgbTree", 
                 trControl = fitControl,
                 verbose = FALSE,
                 tuneGrid = xgbGrid,
                 objective="reg:squarederror")
## Warning in check.booster.params(params, ...): The following parameters were provided multiple times:
##  objective
##   Only the last value for each of them will be used.
## [18:02:59] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## (the two warnings above are emitted again for every resample and tuning
## combination; the remaining verbatim repetitions are omitted here)
##  objective
##   Only the last value for each of them will be used.
## [18:05:39] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## [18:05:39] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## [18:05:40] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## Warning in check.booster.params(params, ...): The following parameters were provided multiple times:
##  objective
##   Only the last value for each of them will be used.
## [18:05:42] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## [18:05:42] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## [18:05:42] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## Warning in check.booster.params(params, ...): The following parameters were provided multiple times:
##  objective
##   Only the last value for each of them will be used.
## [18:05:44] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## [18:05:44] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## [18:05:44] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## Warning in check.booster.params(params, ...): The following parameters were provided multiple times:
##  objective
##   Only the last value for each of them will be used.
## [18:05:46] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## [18:05:46] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## [18:05:46] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## Warning in check.booster.params(params, ...): The following parameters were provided multiple times:
##  objective
##   Only the last value for each of them will be used.
## [18:05:49] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## [18:05:49] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## [18:05:49] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## Warning in check.booster.params(params, ...): The following parameters were provided multiple times:
##  objective
##   Only the last value for each of them will be used.
## [18:05:51] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## [18:05:51] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## [18:05:51] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## Warning in check.booster.params(params, ...): The following parameters were provided multiple times:
##  objective
##   Only the last value for each of them will be used.
## [18:05:53] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## [18:05:53] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## [18:05:53] WARNING: src/c_api/c_api.cc:935: `ntree_limit` is deprecated, use `iteration_range` instead.
## Warning in check.booster.params(params, ...): The following parameters were provided multiple times:
##  objective
##   Only the last value for each of them will be used.
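Both warnings are cosmetic but avoidable. The `ntree_limit` message comes from the installed xgboost version deprecating that argument in favour of `iteration_range` (caret still passes the old name when predicting), and the duplicated `objective` appears because caret's "xgbTree" method sets an objective itself, so any `objective=` passed through `train()` arrives twice. A minimal warning-free sketch, assuming the same `fitControl` and reusing the tuning grid defined earlier (called `xgbGrid` here purely for illustration):

# sketch only: identical train() call with the objective argument omitted,
# so the objective caret sets internally ("binary:logistic" for a two-level
# factor outcome) is the only one supplied
set.seed(16)
xgbFit_nowarn <- train(promo ~ ., data = moklas3.jk.train,
                       method    = "xgbTree",
                       trControl = fitControl,
                       tuneGrid  = xgbGrid,  # assumed name of the grid used above
                       verbose   = FALSE)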
xgbFit
## eXtreme Gradient Boosting 
## 
## 288 samples
##   8 predictor
##   2 classes: '0', '1' 
## 
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times) 
## Summary of sample sizes: 231, 231, 231, 230, 229, 231, ... 
## Resampling results across tuning parameters:
## 
##   eta   nrounds  Accuracy   Kappa    
##   0.01   300     0.7207128  0.2936283
##   0.01   500     0.7227455  0.3105954
##   0.01  1000     0.7186435  0.3128324
##   0.01  1500     0.7117928  0.3017827
##   0.02   300     0.7214138  0.3122790
##   0.02   500     0.7166096  0.3076646
##   0.02  1000     0.7097126  0.3003599
##   0.02  1500     0.6958574  0.2736519
##   0.03   300     0.7227939  0.3220104
##   0.03   500     0.7124825  0.3041791
##   0.03  1000     0.6937763  0.2690656
##   0.03  1500     0.6902554  0.2627739
## 
## Tuning parameter 'max_depth' was held constant at a value of 2
## Tuning parameter 'gamma' was held constant at a value of 0
## Tuning parameter 'colsample_bytree' was held constant at a value of 1
## Tuning parameter 'min_child_weight' was held constant at a value of 1
## Tuning parameter 'subsample' was held constant at a value of 1
## Accuracy was used to select the optimal model using the largest value.
## The final values used for the model were nrounds = 300, max_depth = 2, eta
##  = 0.03, gamma = 0, colsample_bytree = 1, min_child_weight = 1 and subsample
##  = 1.
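The resampling table above can also be queried programmatically. A small sketch that pulls the best row out of `xgbFit$results`, selecting on Kappa, which is often more informative than Accuracy for imbalanced classes (here both criteria pick the same row, eta = 0.03 and nrounds = 300):

# inspect the caret resampling results and pick the row with the highest Kappa
res <- xgbFit$results
res[which.max(res$Kappa), c("eta", "nrounds", "Accuracy", "Kappa")]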
xgbFit$bestTune
xgbFit.best <- xgbFit$bestTune
set.seed(16)
# Refit with the best tuning parameters. NOTE: passing objective= here
# duplicates the objective that caret's "xgbTree" method sets internally,
# which triggers the "provided multiple times" warnings below; moreover
# "reg:squarederror" is a regression objective, while this is a two-class
# problem ("binary:logistic"). The argument is kept to match the original run.
xgbFit1 <- train(promo ~ ., data = moklas3.jk.train,
                 method = "xgbTree",
                 trControl = fitControl,
                 verbose = FALSE,
                 tuneGrid = xgbFit.best,
                 objective = "reg:squarederror")
## Warning in check.booster.params(params, ...): The following parameters were provided multiple times:
##  objective
##   Only the last value for each of them will be used.
## (this warning is repeated once per resampling fit; the remaining identical copies are omitted)
xgbFit1
## eXtreme Gradient Boosting 
## 
## 288 samples
##   8 predictor
##   2 classes: '0', '1' 
## 
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times) 
## Summary of sample sizes: 231, 231, 231, 230, 229, 231, ... 
## Resampling results:
## 
##   Accuracy   Kappa    
##   0.7227939  0.3220104
## 
## Tuning parameter 'nrounds' was held constant at a value of 300
## Tuning parameter 'max_depth' was held constant at a value of 2
## Tuning parameter 'eta' was held constant at a value of 0.03
## Tuning parameter 'gamma' was held constant at a value of 0
## Tuning parameter 'colsample_bytree' was held constant at a value of 1
## Tuning parameter 'min_child_weight' was held constant at a value of 1
## Tuning parameter 'subsample' was held constant at a value of 1
# Caution: xgbFit1$trainingData$.outcome is the stored copy of the training
# labels, so comparing it with moklas3.jk.train$promo always yields a perfect
# confusion matrix; it does not measure model fit (see the sketch below).
confusionMatrix(xgbFit1$trainingData$.outcome, moklas3.jk.train$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1
##          0 192   0
##          1   0  96
##                                      
##                Accuracy : 1          
##                  95% CI : (0.9873, 1)
##     No Information Rate : 0.6667     
##     P-Value [Acc > NIR] : < 2.2e-16  
##                                      
##                   Kappa : 1          
##                                      
##  Mcnemar's Test P-Value : NA         
##                                      
##             Sensitivity : 1.0000     
##             Specificity : 1.0000     
##          Pos Pred Value : 1.0000     
##          Neg Pred Value : 1.0000     
##              Prevalence : 0.3333     
##          Detection Rate : 0.3333     
##    Detection Prevalence : 0.3333     
##       Balanced Accuracy : 1.0000     
##                                      
##        'Positive' Class : 1          
## 
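The perfect matrix above is expected by construction (see the caution in the code). For an honest resubstitution estimate, compare actual model predictions on the training data with the labels; a minimal sketch:

# sketch: resubstitution performance, i.e. predictions on the training set
train_preds <- predict(xgbFit1, newdata = moklas3.jk.train)
confusionMatrix(train_preds, moklas3.jk.train$promo, positive = "1")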
## Generate predictions on the test set
y_hats_x1 <- predict(
  
  ## Fitted caret XGBoost object
  object = xgbFit1, 
  
  ## Data to use for predictions; drop the outcome column (promo, column 9)
  newdata = moklas3.jk.test[, -9])
confusionMatrix(y_hats_x1,moklas3.jk.test$promo, positive="1")
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  0  1
##          0 44 14
##          1  3 10
##                                           
##                Accuracy : 0.7606          
##                  95% CI : (0.6446, 0.8539)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.04858         
##                                           
##                   Kappa : 0.3974          
##                                           
##  Mcnemar's Test P-Value : 0.01529         
##                                           
##             Sensitivity : 0.4167          
##             Specificity : 0.9362          
##          Pos Pred Value : 0.7692          
##          Neg Pred Value : 0.7586          
##              Prevalence : 0.3380          
##          Detection Rate : 0.1408          
##    Detection Prevalence : 0.1831          
##       Balanced Accuracy : 0.6764          
##                                           
##        'Positive' Class : 1               
## 
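Beyond the thresholded confusion matrix, class probabilities give a threshold-free view of the model. A sketch of a test-set ROC curve and AUC using ROCR (loaded earlier); `type = "prob"` asks caret for class probabilities:

# sketch: ROC curve and AUC for the tuned caret model on the test set
probs <- predict(xgbFit1, newdata = moklas3.jk.test[, -9], type = "prob")[, "1"]
pred_obj <- ROCR::prediction(probs, moklas3.jk.test$promo)
plot(ROCR::performance(pred_obj, "tpr", "fpr"), main = "ROC - XGBoost (test)")
ROCR::performance(pred_obj, "auc")@y.values[[1]]  # area under the curve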
trellis.par.set(caretTheme())
plot(xgbFit, main = "Fine-Tuning Parameters on XGBoost",
     xlab = "nrounds",
     ylab = "Accuracy")  # the model was tuned on Accuracy, not RMSE

library(xgboost)
moklas3.jk.train.matrix <- data.matrix(moklas3.jk.train[, -9])
# xgboost expects a numeric 0/1 label vector, not a character matrix
promo <- as.numeric(as.character(moklas3.jk.train$promo))
xgbModel <- xgboost(data = moklas3.jk.train.matrix, 
                    label = promo,
                    nrounds = 1000,
                    max_depth = 2,
                    eta = 0.01,
                    objective = "binary:logistic")
## [1]  train-logloss:0.690803 
## [2]  train-logloss:0.688503 
## [3]  train-logloss:0.686246 
## (iterations 4-998 omitted; the training log-loss decreases steadily)
## [999]    train-logloss:0.356787 
## [1000]   train-logloss:0.356727
## Plot the training error (log-loss) per boosting iteration

plot(xgbModel$evaluation_log, type = "l")
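Since the training log-loss above falls steadily through all 1000 rounds, a fixed `nrounds` risks overfitting. A sketch using `xgb.cv` with early stopping, so cross-validated log-loss chooses the iteration count (same matrix and labels as above):

# sketch: cross-validated choice of nrounds via early stopping
cv <- xgb.cv(data = moklas3.jk.train.matrix, label = promo,
             nrounds = 1000, max_depth = 2, eta = 0.01,
             objective = "binary:logistic",
             nfold = 5, early_stopping_rounds = 50, verbose = 0)
cv$best_iteration  # iteration with the best mean CV log-loss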

## Plot feature importance
importance <- xgb.importance(model = xgbModel)

xgb.plot.importance(importance)

## Make predictions on test data
moklas3.jk.test.matrix <- data.matrix(moklas3.jk.test[, -9])
promo.test <- as.matrix(as.factor(as.character(moklas3.jk.test$promo)))
predicted <- predict(xgbModel, moklas3.jk.test.matrix)

## Classify with a 0.5 probability cutoff
predicted <- ifelse(predicted > 0.5, 1, 0)

## Create confusion matrix

confusionMatrix(table(predicted = predicted, actual = promo.test))
## Confusion Matrix and Statistics
## 
##          actual
## predicted  0  1
##         0 38 17
##         1  9  7
##                                         
##                Accuracy : 0.6338        
##                  95% CI : (0.511, 0.745)
##     No Information Rate : 0.662         
##     P-Value [Acc > NIR] : 0.7373        
##                                         
##                   Kappa : 0.1091        
##                                         
##  Mcnemar's Test P-Value : 0.1698        
##                                         
##             Sensitivity : 0.8085        
##             Specificity : 0.2917        
##          Pos Pred Value : 0.6909        
##          Neg Pred Value : 0.4375        
##              Prevalence : 0.6620        
##          Detection Rate : 0.5352        
##    Detection Prevalence : 0.7746        
##       Balanced Accuracy : 0.5501        
##                                         
##        'Positive' Class : 0             
## 
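Note that the 'Positive' Class is 0 here: when confusionMatrix() receives a pre-built table it treats the first level as positive, unlike the earlier calls that set positive = "1". A sketch that keeps class 1 as the positive class for comparability:

# sketch: same comparison with "1" declared the positive class
confusionMatrix(factor(predicted, levels = c("0", "1")),
                factor(promo.test, levels = c("0", "1")),
                positive = "1")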

RUS (random undersampling of the majority class)

library(xgboost)
down.train.matrix <- data.matrix(down_train[, -(9:10)])
# again, use a numeric 0/1 label vector for xgboost
promo1 <- as.numeric(as.character(down_train$promo))
xgbModel1 <- xgboost(data = down.train.matrix, 
                     label = promo1,
                     nrounds = 1000,
                     max_depth = 2,
                     eta = 0.01,
                     objective = "binary:logistic")
## [1]  train-logloss:0.691789 
## [2]  train-logloss:0.690456 
## [3]  train-logloss:0.689148 
## [4]  train-logloss:0.687865 
## [5]  train-logloss:0.686605 
## ... (rounds 6-99 omitted) 
## [100]    train-logloss:0.604775 
## ... (rounds 101-499 omitted) 
## [500]    train-logloss:0.445603 
## ... (rounds 501-999 omitted) 
## [1000]   train-logloss:0.360469
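Here nrounds is fixed at 1000 with a small eta; a quick way to sanity-check that choice (a sketch, not part of the original analysis) is k-fold cross-validation with early stopping:

# Sketch: let early stopping pick the number of boosting rounds instead of
# fixing nrounds = 1000. Reuses the matrix and label defined above.
cv <- xgb.cv(data = down.train.matrix,
             label = promo1,
             nfold = 5,
             nrounds = 2000,
             max_depth = 2,
             eta = 0.01,
             objective = "binary:logistic",
             early_stopping_rounds = 50,
             verbose = 0)
cv$best_iteration  # suggested number of rounds at the minimum CV logloss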
## Make predictions on the test data
moklas3.jk.test.matrix <- data.matrix(moklas3.jk.test[, -9])
promo.test <- as.numeric(as.character(moklas3.jk.test$promo))
predicted <- predict(xgbModel1, moklas3.jk.test.matrix)

## Convert predicted probabilities to class labels using a 0.5 cutoff
predicted <- ifelse(predicted > 0.5, 1, 0)

## Create the confusion matrix
confusionMatrix(table(predicted = predicted, actual = promo.test))
## Confusion Matrix and Statistics
## 
##          actual
## predicted  0  1
##         0 29  7
##         1 18 17
##                                           
##                Accuracy : 0.6479          
##                  95% CI : (0.5254, 0.7576)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.6509          
##                                           
##                   Kappa : 0.2925          
##                                           
##  Mcnemar's Test P-Value : 0.0455          
##                                           
##             Sensitivity : 0.6170          
##             Specificity : 0.7083          
##          Pos Pred Value : 0.8056          
##          Neg Pred Value : 0.4857          
##              Prevalence : 0.6620          
##          Detection Rate : 0.4085          
##    Detection Prevalence : 0.5070          
##       Balanced Accuracy : 0.6627          
##                                           
##        'Positive' Class : 0               
## 
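The 0.5 cutoff above bakes a threshold into the evaluation; a threshold-free check is the AUC on the raw test-set probabilities. A sketch using ROCR (loaded earlier), not part of the original analysis:

# Re-score the test set with probabilities (predict() returns probabilities
# for objective = "binary:logistic") and compute the area under the ROC curve.
probs <- predict(xgbModel1, moklas3.jk.test.matrix)
pred.obj <- prediction(probs, promo.test)
performance(pred.obj, "auc")@y.values[[1]]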

ROS (random oversampling): fit the same XGBoost model on the up-sampled training set.
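As above, up_train is assumed to come from caret::upSample(), which balances the classes by resampling minority-class rows with replacement; a parallel sketch with the same assumed names as the RUS sketch:

library(caret)
# Hypothetical sketch mirroring the RUS one: oversample the minority class.
set.seed(123)
up_train <- upSample(x = moklas3.jk.train[, -9],
                     y = as.factor(moklas3.jk.train$promo),
                     yname = "promo")
table(up_train$promo)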

library(xgboost)
# Drop the outcome columns and convert the predictors to a numeric matrix
up.train.matrix <- data.matrix(up_train[, -(9:10)])
# xgboost expects a numeric 0/1 label vector
promo2 <- as.numeric(as.character(up_train$promo))
xgbModel2 <- xgboost(data = up.train.matrix,
                     label = promo2,
                     nrounds = 1000,
                     max_depth = 2,
                     eta = 0.01,
                     objective = "binary:logistic")
## [1]  train-logloss:0.691671 
## [2]  train-logloss:0.690224 
## [3]  train-logloss:0.688803 
## [4]  train-logloss:0.687408 
## [5]  train-logloss:0.686040 
## ... (rounds 6-99 omitted) 
## [100]    train-logloss:0.596846 
## ... (rounds 101-499 omitted) 
## [500]    train-logloss:0.445480 
## ... (rounds 501-729 omitted) 
## [730]    train-logloss:0.403890 
## [731]    train-logloss:0.403790 
## [732]    train-logloss:0.403615 
## [733]    train-logloss:0.403446 
## [734]    train-logloss:0.403247 
## [735]    train-logloss:0.403147 
## [736]    train-logloss:0.402904 
## [737]    train-logloss:0.402717 
## [738]    train-logloss:0.402588 
## [739]    train-logloss:0.402431 
## [740]    train-logloss:0.402259 
## [741]    train-logloss:0.402160 
## [742]    train-logloss:0.401996 
## [743]    train-logloss:0.401791 
## [744]    train-logloss:0.401636 
## [745]    train-logloss:0.401467 
## [746]    train-logloss:0.401369 
## [747]    train-logloss:0.401184 
## [748]    train-logloss:0.400945 
## [749]    train-logloss:0.400848 
## [750]    train-logloss:0.400687 
## [751]    train-logloss:0.400490 
## [752]    train-logloss:0.400329 
## [753]    train-logloss:0.400233 
## [754]    train-logloss:0.399998 
## [755]    train-logloss:0.399816 
## [756]    train-logloss:0.399689 
## [757]    train-logloss:0.399487 
## [758]    train-logloss:0.399329 
## [759]    train-logloss:0.399161 
## [760]    train-logloss:0.398931 
## [761]    train-logloss:0.398778 
## [762]    train-logloss:0.398611 
## [763]    train-logloss:0.398516 
## [764]    train-logloss:0.398336 
## [765]    train-logloss:0.398210 
## [766]    train-logloss:0.398116 
## [767]    train-logloss:0.397960 
## [768]    train-logloss:0.397794 
## [769]    train-logloss:0.397600 
## [770]    train-logloss:0.397374 
## [771]    train-logloss:0.397196 
## [772]    train-logloss:0.397039 
## [773]    train-logloss:0.396914 
## [774]    train-logloss:0.396715 
## [775]    train-logloss:0.396622 
## [776]    train-logloss:0.396468 
## [777]    train-logloss:0.396317 
## [778]    train-logloss:0.396096 
## [779]    train-logloss:0.396003 
## [780]    train-logloss:0.395880 
## [781]    train-logloss:0.395728 
## [782]    train-logloss:0.395532 
## [783]    train-logloss:0.395382 
## [784]    train-logloss:0.395207 
## [785]    train-logloss:0.395116 
## [786]    train-logloss:0.394951 
## [787]    train-logloss:0.394733 
## [788]    train-logloss:0.394570 
## [789]    train-logloss:0.394408 
## [790]    train-logloss:0.394317 
## [791]    train-logloss:0.394124 
## [792]    train-logloss:0.393909 
## [793]    train-logloss:0.393756 
## [794]    train-logloss:0.393583 
## [795]    train-logloss:0.393372 
## [796]    train-logloss:0.393250 
## [797]    train-logloss:0.393101 
## [798]    train-logloss:0.393011 
## [799]    train-logloss:0.392864 
## [800]    train-logloss:0.392704 
## [801]    train-logloss:0.392615 
## [802]    train-logloss:0.392444 
## [803]    train-logloss:0.392253 
## [804]    train-logloss:0.392045 
## [805]    train-logloss:0.391925 
## [806]    train-logloss:0.391778 
## [807]    train-logloss:0.391585 
## [808]    train-logloss:0.391497 
## [809]    train-logloss:0.391339 
## [810]    train-logloss:0.391134 
## [811]    train-logloss:0.390965 
## [812]    train-logloss:0.390878 
## [813]    train-logloss:0.390733 
## [814]    train-logloss:0.390577 
## [815]    train-logloss:0.390490 
## [816]    train-logloss:0.390346 
## [817]    train-logloss:0.390154 
## [818]    train-logloss:0.389965 
## [819]    train-logloss:0.389764 
## [820]    train-logloss:0.389597 
## [821]    train-logloss:0.389511 
## [822]    train-logloss:0.389314 
## [823]    train-logloss:0.389194 
## [824]    train-logloss:0.389109 
## [825]    train-logloss:0.388967 
## [826]    train-logloss:0.388824 
## [827]    train-logloss:0.388670 
## [828]    train-logloss:0.388505 
## [829]    train-logloss:0.388420 
## [830]    train-logloss:0.388231 
## [831]    train-logloss:0.388084 
## [832]    train-logloss:0.387931 
## [833]    train-logloss:0.387744 
## [834]    train-logloss:0.387550 
## [835]    train-logloss:0.387466 
## [836]    train-logloss:0.387349 
## [837]    train-logloss:0.387207 
## [838]    train-logloss:0.387068 
## [839]    train-logloss:0.386905 
## [840]    train-logloss:0.386822 
## [841]    train-logloss:0.386632 
## [842]    train-logloss:0.386486 
## [843]    train-logloss:0.386335 
## [844]    train-logloss:0.386149 
## [845]    train-logloss:0.386010 
## [846]    train-logloss:0.385824 
## [847]    train-logloss:0.385636 
## [848]    train-logloss:0.385555 
## [849]    train-logloss:0.385418 
## [850]    train-logloss:0.385258 
## [851]    train-logloss:0.385108 
## [852]    train-logloss:0.385027 
## [853]    train-logloss:0.384911 
## [854]    train-logloss:0.384831 
## [855]    train-logloss:0.384696 
## [856]    train-logloss:0.384512 
## [857]    train-logloss:0.384365 
## [858]    train-logloss:0.384228 
## [859]    train-logloss:0.384148 
## [860]    train-logloss:0.383964 
## [861]    train-logloss:0.383805 
## [862]    train-logloss:0.383622 
## [863]    train-logloss:0.383441 
## [864]    train-logloss:0.383299 
## [865]    train-logloss:0.383183 
## [866]    train-logloss:0.383037 
## [867]    train-logloss:0.382959 
## [868]    train-logloss:0.382823 
## [869]    train-logloss:0.382680 
## [870]    train-logloss:0.382602 
## [871]    train-logloss:0.382469 
## [872]    train-logloss:0.382309 
## [873]    train-logloss:0.382152 
## [874]    train-logloss:0.381970 
## [875]    train-logloss:0.381792 
## [876]    train-logloss:0.381652 
## [877]    train-logloss:0.381537 
## [878]    train-logloss:0.381406 
## [879]    train-logloss:0.381224 
## [880]    train-logloss:0.381049 
## [881]    train-logloss:0.380893 
## [882]    train-logloss:0.380817 
## [883]    train-logloss:0.380674 
## [884]    train-logloss:0.380500 
## [885]    train-logloss:0.380424 
## [886]    train-logloss:0.380290 
## [887]    train-logloss:0.380153 
## [888]    train-logloss:0.380077 
## [889]    train-logloss:0.379898 
## [890]    train-logloss:0.379719 
## [891]    train-logloss:0.379548 
## [892]    train-logloss:0.379419 
## [893]    train-logloss:0.379266 
## [894]    train-logloss:0.379191 
## [895]    train-logloss:0.379023 
## [896]    train-logloss:0.378909 
## [897]    train-logloss:0.378731 
## [898]    train-logloss:0.378595 
## [899]    train-logloss:0.378463 
## [900]    train-logloss:0.378389 
## [901]    train-logloss:0.378237 
## [902]    train-logloss:0.378060 
## [903]    train-logloss:0.377987 
## [904]    train-logloss:0.377820 
## [905]    train-logloss:0.377686 
## [906]    train-logloss:0.377544 
## [907]    train-logloss:0.377472 
## [908]    train-logloss:0.377404 
## [909]    train-logloss:0.377277 
## [910]    train-logloss:0.377205 
## [911]    train-logloss:0.377065 
## [912]    train-logloss:0.376901 
## [913]    train-logloss:0.376752 
## [914]    train-logloss:0.376576 
## [915]    train-logloss:0.376464 
## [916]    train-logloss:0.376308 
## [917]    train-logloss:0.376133 
## [918]    train-logloss:0.375969 
## [919]    train-logloss:0.375836 
## [920]    train-logloss:0.375698 
## [921]    train-logloss:0.375627 
## [922]    train-logloss:0.375561 
## [923]    train-logloss:0.375432 
## [924]    train-logloss:0.375295 
## [925]    train-logloss:0.375225 
## [926]    train-logloss:0.375064 
## [927]    train-logloss:0.374953 
## [928]    train-logloss:0.374805 
## [929]    train-logloss:0.374735 
## [930]    train-logloss:0.374610 
## [931]    train-logloss:0.374479 
## [932]    train-logloss:0.374304 
## [933]    train-logloss:0.374146 
## [934]    train-logloss:0.374077 
## [935]    train-logloss:0.373931 
## [936]    train-logloss:0.373758 
## [937]    train-logloss:0.373601 
## [938]    train-logloss:0.373532 
## [939]    train-logloss:0.373402 
## [940]    train-logloss:0.373338 
## [941]    train-logloss:0.373210 
## [942]    train-logloss:0.373075 
## [943]    train-logloss:0.373007 
## [944]    train-logloss:0.372853 
## [945]    train-logloss:0.372742 
## [946]    train-logloss:0.372588 
## [947]    train-logloss:0.372498 
## [948]    train-logloss:0.372375 
## [949]    train-logloss:0.372231 
## [950]    train-logloss:0.372061 
## [951]    train-logloss:0.371909 
## [952]    train-logloss:0.371841 
## [953]    train-logloss:0.371712 
## [954]    train-logloss:0.371578 
## [955]    train-logloss:0.371511 
## [956]    train-logloss:0.371360 
## [957]    train-logloss:0.371294 
## [958]    train-logloss:0.371121 
## [959]    train-logloss:0.370988 
## [960]    train-logloss:0.370845 
## [961]    train-logloss:0.370676 
## [962]    train-logloss:0.370527 
## [963]    train-logloss:0.370439 
## [964]    train-logloss:0.370312 
## [965]    train-logloss:0.370165 
## [966]    train-logloss:0.370105 
## [967]    train-logloss:0.369979 
## [968]    train-logloss:0.369893 
## [969]    train-logloss:0.369742 
## [970]    train-logloss:0.369621 
## [971]    train-logloss:0.369480 
## [972]    train-logloss:0.369334 
## [973]    train-logloss:0.369163 
## [974]    train-logloss:0.369024 
## [975]    train-logloss:0.368857 
## [976]    train-logloss:0.368791 
## [977]    train-logloss:0.368648 
## [978]    train-logloss:0.368516 
## [979]    train-logloss:0.368450 
## [980]    train-logloss:0.368324 
## [981]    train-logloss:0.368194 
## [982]    train-logloss:0.368129 
## [983]    train-logloss:0.367986 
## [984]    train-logloss:0.367928 
## [985]    train-logloss:0.367819 
## [986]    train-logloss:0.367700 
## [987]    train-logloss:0.367562 
## [988]    train-logloss:0.367397 
## [989]    train-logloss:0.367256 
## [990]    train-logloss:0.367086 
## [991]    train-logloss:0.366957 
## [992]    train-logloss:0.366893 
## [993]    train-logloss:0.366753 
## [994]    train-logloss:0.366671 
## [995]    train-logloss:0.366535 
## [996]    train-logloss:0.366385 
## [997]    train-logloss:0.366321 
## [998]    train-logloss:0.366198 
## [999]    train-logloss:0.366071 
## [1000]   train-logloss:0.366007
## Make predictions on test data
moklas3.jk.test.matrix <- data.matrix(moklas3.jk.test[, -9])
promo.test <- as.matrix(as.factor(as.character(moklas3.jk.test$promo)))
predicted <- predict(xgbModel2, moklas3.jk.test.matrix)

## Convert the predicted probabilities to class labels at a 0.5 cutoff
predicted <- ifelse(predicted > 0.5, 1, 0)

## Create confusion matrix
confusionMatrix(table(predicted = predicted, actual = promo.test))
## Confusion Matrix and Statistics
## 
##          actual
## predicted  0  1
##         0 35  7
##         1 12 17
##                                           
##                Accuracy : 0.7324          
##                  95% CI : (0.6141, 0.8306)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.1285          
##                                           
##                   Kappa : 0.431           
##                                           
##  Mcnemar's Test P-Value : 0.3588          
##                                           
##             Sensitivity : 0.7447          
##             Specificity : 0.7083          
##          Pos Pred Value : 0.8333          
##          Neg Pred Value : 0.5862          
##              Prevalence : 0.6620          
##          Detection Rate : 0.4930          
##    Detection Prevalence : 0.5915          
##       Balanced Accuracy : 0.7265          
##                                           
##        'Positive' Class : 0               
## 
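
The 0.5 cutoff applied above is arbitrary, so a threshold-free summary is a useful complement to the confusion matrix. Below is a minimal sketch using ROCR (already attached) to compute the test-set AUC; predicted.prob is a name introduced here for illustration, holding the raw probabilities from predict() before the cutoff is applied.

predicted.prob <- predict(xgbModel2, moklas3.jk.test.matrix) # raw probabilities, pre-cutoff
pred.obj <- prediction(predicted.prob, promo.test)           # ROCR prediction object
performance(pred.obj, measure = "auc")@y.values[[1]]         # area under the ROC curve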

SMOTE

The same XGBoost specification is now refit on the SMOTE-balanced training set (smote_train) and evaluated on the unchanged test set, so the two confusion matrices are directly comparable.

library(xgboost)
smote.train.matrix <- data.matrix(smote_train[, -9])
promo3 <- as.matrix(as.factor(as.character(smote_train$promo)))
xgbModel3 <- xgboost(data = smote.train.matrix,
                     label = promo3,
                     nrounds = 1000,
                     max_depth = 2,
                     eta = 0.01,
                     objective = "binary:logistic")
## [1]  train-logloss:0.691175 
## ...  (per-round log for iterations 2-999 omitted; the training
##       logloss decreases steadily over the 1000 rounds)
## [1000]   train-logloss:0.373928
## Make predictions on test data
moklas3.jk.test.matrix <- data.matrix(moklas3.jk.test[, -9])
promo.test <- as.matrix(as.factor(as.character(moklas3.jk.test$promo)))
predicted <- predict(xgbModel3, moklas3.jk.test.matrix)

## Convert the predicted probabilities to class labels at a 0.5 cutoff
predicted <- ifelse(predicted > 0.5, 1, 0)

## Create confusion matrix
confusionMatrix(table(predicted = predicted, actual = promo.test))
## Confusion Matrix and Statistics
## 
##          actual
## predicted  0  1
##         0 28  6
##         1 19 18
##                                           
##                Accuracy : 0.6479          
##                  95% CI : (0.5254, 0.7576)
##     No Information Rate : 0.662           
##     P-Value [Acc > NIR] : 0.6509          
##                                           
##                   Kappa : 0.3053          
##                                           
##  Mcnemar's Test P-Value : 0.0164          
##                                           
##             Sensitivity : 0.5957          
##             Specificity : 0.7500          
##          Pos Pred Value : 0.8235          
##          Neg Pred Value : 0.4865          
##              Prevalence : 0.6620          
##          Detection Rate : 0.3944          
##    Detection Prevalence : 0.4789          
##       Balanced Accuracy : 0.6729          
##                                           
##        'Positive' Class : 0               
##
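
Across both fits the training logloss falls monotonically for all 1000 rounds, which by itself says nothing about generalization; the SMOTE model in fact loses overall test accuracy (0.6479 vs 0.7324) even though specificity improves. One way to choose nrounds from evidence rather than a fixed budget is cross-validated early stopping. A minimal sketch, assuming the SMOTE matrices built above (the parameters mirror xgbModel3; verbose = 0 simply suppresses the long per-round log):

## Sketch: choose the number of boosting rounds by 5-fold CV with early stopping
cv <- xgb.cv(data = smote.train.matrix,
             label = promo3,
             nrounds = 1000,
             nfold = 5,
             max_depth = 2,
             eta = 0.01,
             objective = "binary:logistic",
             early_stopping_rounds = 50, # stop once CV logloss stops improving
             verbose = 0)
cv$best_iteration # CV-selected round count

Refitting with the CV-selected nrounds and re-running the prediction block above would guard against the fixed 1000-round budget over- or under-fitting.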