Install Packages

#install.packages("caret")
#install.packages("pROC")
#install.packages("e1071")
#install.packages("ROCR")
#install.packages("nnet")
library(data.table)
library(caret)
library(pROC)
library(e1071)
library(ROCR)
library(nnet)
# nnet has limited plotting methods but is a good package to start learning ANNs; the neuralnet package is an alternative
# see "https://beckmw.wordpress.com/tag/nnet/" to learn more
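
As a small optional convenience (not part of the original workflow), the install/load steps above can be collapsed into a loop that installs only whatever is missing; pkgs and missing below are just illustrative names.

pkgs <- c("data.table", "caret", "pROC", "e1071", "ROCR", "nnet")
missing <- setdiff(pkgs, rownames(installed.packages()))   # packages not yet installed
if (length(missing) > 0) install.packages(missing)
invisible(lapply(pkgs, library, character.only = TRUE))    # load them all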

Load Data

attrition_data_nn = read.table("D:/Analytics/BACP-Dec2017/10_MachineLearning/HR_Employee_Attrition_Data.csv", sep = ",", header = T)
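
The same file can also be read with data.table::fread (data.table is already loaded above); this is just an optional alternative sketch using the same path, with stringsAsFactors = TRUE so the character columns become factors as in the read.table() call.

attrition_data_nn <- as.data.frame(
  fread("D:/Analytics/BACP-Dec2017/10_MachineLearning/HR_Employee_Attrition_Data.csv",
        stringsAsFactors = TRUE))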

Data Distribution & Type

str(attrition_data_nn)
## 'data.frame':    2940 obs. of  35 variables:
##  $ EmployeeNumber          : int  1 2 3 4 5 6 7 8 9 10 ...
##  $ Attrition               : Factor w/ 2 levels "No","Yes": 2 1 2 1 1 1 1 1 1 1 ...
##  $ Age                     : int  41 49 37 33 27 32 59 30 38 36 ...
##  $ BusinessTravel          : Factor w/ 3 levels "Non-Travel","Travel_Frequently",..: 3 2 3 2 3 2 3 3 2 3 ...
##  $ DailyRate               : int  1102 279 1373 1392 591 1005 1324 1358 216 1299 ...
##  $ Department              : Factor w/ 3 levels "Human Resources",..: 3 2 2 2 2 2 2 2 2 2 ...
##  $ DistanceFromHome        : int  1 8 2 3 2 2 3 24 23 27 ...
##  $ Education               : int  2 1 2 4 1 2 3 1 3 3 ...
##  $ EducationField          : Factor w/ 6 levels "Human Resources",..: 2 2 5 2 4 2 4 2 2 4 ...
##  $ EmployeeCount           : int  1 1 1 1 1 1 1 1 1 1 ...
##  $ EnvironmentSatisfaction : int  2 3 4 4 1 4 3 4 4 3 ...
##  $ Gender                  : Factor w/ 2 levels "Female","Male": 1 2 2 1 2 2 1 2 2 2 ...
##  $ HourlyRate              : int  94 61 92 56 40 79 81 67 44 94 ...
##  $ JobInvolvement          : int  3 2 2 3 3 3 4 3 2 3 ...
##  $ JobLevel                : int  2 2 1 1 1 1 1 1 3 2 ...
##  $ JobRole                 : Factor w/ 9 levels "Healthcare Representative",..: 8 7 3 7 3 3 3 3 5 1 ...
##  $ JobSatisfaction         : int  4 2 3 3 2 4 1 3 3 3 ...
##  $ MaritalStatus           : Factor w/ 3 levels "Divorced","Married",..: 3 2 3 2 2 3 2 1 3 2 ...
##  $ MonthlyIncome           : int  5993 5130 2090 2909 3468 3068 2670 2693 9526 5237 ...
##  $ MonthlyRate             : int  19479 24907 2396 23159 16632 11864 9964 13335 8787 16577 ...
##  $ NumCompaniesWorked      : int  8 1 6 1 9 0 4 1 0 6 ...
##  $ Over18                  : Factor w/ 1 level "Y": 1 1 1 1 1 1 1 1 1 1 ...
##  $ OverTime                : Factor w/ 2 levels "No","Yes": 2 1 2 2 1 1 2 1 1 1 ...
##  $ PercentSalaryHike       : int  11 23 15 11 12 13 20 22 21 13 ...
##  $ PerformanceRating       : int  3 4 3 3 3 3 4 4 4 3 ...
##  $ RelationshipSatisfaction: int  1 4 2 3 4 3 1 2 2 2 ...
##  $ StandardHours           : int  80 80 80 80 80 80 80 80 80 80 ...
##  $ StockOptionLevel        : int  0 1 0 0 1 0 3 1 0 2 ...
##  $ TotalWorkingYears       : int  8 10 7 8 6 8 12 1 10 17 ...
##  $ TrainingTimesLastYear   : int  0 3 3 3 3 2 3 2 2 3 ...
##  $ WorkLifeBalance         : int  1 3 3 3 3 2 2 3 3 2 ...
##  $ YearsAtCompany          : int  6 10 0 8 2 7 1 1 9 7 ...
##  $ YearsInCurrentRole      : int  4 7 0 7 2 7 0 0 7 7 ...
##  $ YearsSinceLastPromotion : int  0 1 0 3 2 3 0 0 1 7 ...
##  $ YearsWithCurrManager    : int  5 7 0 0 2 6 0 0 8 7 ...

Remove features with constant values, otherwise the neural network will throw an error.
Also remove columns with near-zero variance, since a useful feature should show some variation in its distribution.

nzv <- nearZeroVar(attrition_data_nn)
nzv
## [1] 10 22 27

Columns 10, 22 and 27 correspond to EmployeeCount, Over18 and StandardHours; these are the columns with near-zero variance.
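
As a quick optional sanity check, the numeric indices returned by nearZeroVar() can be mapped back to column names, and saveMetrics = TRUE shows the per-column diagnostics behind the flags.

names(attrition_data_nn)[nzv]                              # EmployeeCount, Over18, StandardHours
head(nearZeroVar(attrition_data_nn, saveMetrics = TRUE))   # freqRatio, percentUnique, zeroVar, nzv per column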

Remove these near-zero-variance columns from the dataset

attrition_data_nn <- attrition_data_nn[,-nzv]

Distribution of the dependent variable (Attrition)

attrition.ratio = table(attrition_data_nn$Attrition)
attrition.ratio
## 
##   No  Yes 
## 2466  474
attrition.prop = prop.table(attrition.ratio)
attrition.prop 
## 
##        No       Yes 
## 0.8387755 0.1612245

About 16% of the records have Attrition = "Yes", which gives the minority class enough representation to build a model without special treatment for class imbalance.
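
If the minority class were much rarer, caret could also resample inside each cross-validation fold. The sketch below is an optional variant that is not used in this analysis; cctrl_up is an illustrative name, and sampling = "up" requests up-sampling of the minority class within each resample.

cctrl_up <- trainControl(method = "cv", number = 10, classProbs = TRUE,
                         summaryFunction = twoClassSummary, sampling = "up")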

Split data into training and test datasets

set.seed(111)
trainIndex <- createDataPartition(attrition_data_nn$Attrition,
                                  p = .7,
                                  list = FALSE,
                                  times = 1)

train.data <- attrition_data_nn[trainIndex,]
test.data  <- attrition_data_nn[-trainIndex,]

Distribution of the partitioned data

prop.table((table(train.data$Attrition)))
## 
##        No       Yes 
## 0.8387567 0.1612433
prop.table((table(test.data$Attrition)))
## 
##        No       Yes 
## 0.8388195 0.1611805

The Attrition proportions are almost identical in the training and test sets; the split is reproducible as long as the seed is not changed.

Cross Validation
Cross-validation is a technique in which a sample of the data set is held out and not used for training; the model is then evaluated on this held-out sample before being finalized. With k-fold cross-validation this is repeated k times, so every observation is used for validation exactly once.
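
To make the k-fold idea concrete, caret's createFolds() can be used to inspect what 10 folds look like on the training data created above; this is purely illustrative, since train() builds its own folds internally.

folds <- createFolds(train.data$Attrition, k = 10)   # list of 10 held-out index vectors
sapply(folds, length)                                # each fold holds roughly 10% of the training rows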

Set the metric for the neural network model (for classification, the usual options are Accuracy or Kappa). Note that because twoClassSummary() is supplied in trainControl() below, caret will optimize ROC instead and warn that "Accuracy" was not in the result set.

metric <- "Accuracy"

Create a control structure for training the NN algorithm with the required switches

Arguments for trainControl()
> method: the resampling method - cv (cross-validation)
> number: either the number of folds or the number of resampling iterations
> classProbs: a logical; should class probabilities be computed for classification models (along with predicted values) in each resample?
> verboseIter: a logical for printing a training log
> summaryFunction: a function that computes performance metrics across resamples. caret includes twoClassSummary(), which computes ROC (AUC), sensitivity and specificity for two-class problems.
> preProcOptions: a list of options to pass to preProcess(). The type of pre-processing (e.g. centering, scaling) is passed in via the preProcess argument of train().

cctrl <- trainControl(
  method = "cv", number = 10, classProbs = TRUE,
  verboseIter = TRUE, summaryFunction = twoClassSummary,
  preProcOptions = list(thresh = 0.75, ICAcomp = 3, k = 5)
)

Define the decay values and hidden-layer sizes to train the algorithm on
When you train a neural network (nnet) using caret, you need to specify two hyper-parameters: size and decay.
> size is the number of units in the hidden layer (nnet fits a single-hidden-layer network), and
> decay is the weight-decay regularization parameter used to avoid over-fitting.

my.grid <- expand.grid(.decay = c(0.1, 0.001, 0.0001), .size = c(5, 10, 15))
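
This grid defines 9 candidate models (3 decay values x 3 sizes); caret will cross-validate each of them, which is why the training log below cycles through nine decay/size combinations within every fold.

my.grid        # the 9 decay/size combinations to be evaluated
nrow(my.grid)  # 9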

Train/create Neural Network Model
> method: string specifying which classification or regression model to use
> preProcess: A string vector that defines a pre-processing of the predictor data
> trControl: A list of values that define how this function acts
> tuneGrid: A data frame with possible tuning values. The columns are named the same as the tuning parameters

nn_model <- train(
  Attrition ~ ., data = train.data,
  method = "nnet", preProcess = c("center", "scale"),
  trControl = cctrl, tuneGrid = my.grid
)
## Warning in train.default(x, y, weights = w, ...): The metric "Accuracy" was
## not in the result set. ROC will be used instead.
## + Fold01: decay=1e-01, size= 5 
## # weights:  236
## initial  value 1569.670614 
## iter  10 value 567.071720
## iter  20 value 489.535457
## iter  30 value 407.206187
## iter  40 value 361.292256
## iter  50 value 336.492679
## iter  60 value 322.319105
## iter  70 value 312.430895
## iter  80 value 307.817288
## iter  90 value 304.779370
## iter 100 value 302.866522
## final  value 302.866522 
## stopped after 100 iterations
## - Fold01: decay=1e-01, size= 5 
## + Fold01: decay=1e-03, size= 5 
## # weights:  236
## initial  value 2182.710756 
## iter  10 value 564.759562
## iter  20 value 448.423638
## iter  30 value 407.371406
## iter  40 value 369.564434
## iter  50 value 334.723256
## iter  60 value 307.472348
## iter  70 value 298.316703
## iter  80 value 290.106597
## iter  90 value 284.742848
## iter 100 value 282.688549
## final  value 282.688549 
## stopped after 100 iterations
## - Fold01: decay=1e-03, size= 5 
## + Fold01: decay=1e-04, size= 5 
## # weights:  236
## initial  value 1203.346769 
## iter  10 value 482.682269
## iter  20 value 374.399647
## iter  30 value 337.744087
## iter  40 value 312.687285
## iter  50 value 294.775629
## iter  60 value 282.829615
## iter  70 value 278.194116
## iter  80 value 273.876732
## iter  90 value 262.824368
## iter 100 value 256.104150
## final  value 256.104150 
## stopped after 100 iterations
## - Fold01: decay=1e-04, size= 5 
## + Fold01: decay=1e-01, size=10 
## # weights:  471
## initial  value 1184.448943 
## iter  10 value 561.358920
## iter  20 value 391.410916
## iter  30 value 307.043492
## iter  40 value 248.546648
## iter  50 value 210.179462
## iter  60 value 185.146816
## iter  70 value 168.043164
## iter  80 value 152.964068
## iter  90 value 144.669319
## iter 100 value 137.569320
## final  value 137.569320 
## stopped after 100 iterations
## - Fold01: decay=1e-01, size=10 
## + Fold01: decay=1e-03, size=10 
## # weights:  471
## initial  value 1065.046381 
## iter  10 value 460.728595
## iter  20 value 223.775189
## iter  30 value 108.864943
## iter  40 value 66.258172
## iter  50 value 39.185178
## iter  60 value 29.275734
## iter  70 value 25.544040
## iter  80 value 22.839666
## iter  90 value 21.049097
## iter 100 value 18.546421
## final  value 18.546421 
## stopped after 100 iterations
## - Fold01: decay=1e-03, size=10 
## + Fold01: decay=1e-04, size=10 
## # weights:  471
## initial  value 973.435399 
## iter  10 value 440.447918
## iter  20 value 183.236547
## iter  30 value 106.746488
## iter  40 value 79.353258
## iter  50 value 65.728568
## iter  60 value 60.199297
## iter  70 value 58.488032
## iter  80 value 55.508328
## iter  90 value 54.820621
## iter 100 value 54.193485
## final  value 54.193485 
## stopped after 100 iterations
## - Fold01: decay=1e-04, size=10 
## + Fold01: decay=1e-01, size=15 
## # weights:  706
## initial  value 1773.952143 
## iter  10 value 567.083411
## iter  20 value 366.176940
## iter  30 value 235.131051
## iter  40 value 172.608807
## iter  50 value 140.823987
## iter  60 value 125.369146
## iter  70 value 115.355573
## iter  80 value 108.471160
## iter  90 value 103.166616
## iter 100 value 99.210009
## final  value 99.210009 
## stopped after 100 iterations
## - Fold01: decay=1e-01, size=15 
## + Fold01: decay=1e-03, size=15 
## # weights:  706
## initial  value 2494.371542 
## iter  10 value 507.102370
## iter  20 value 157.559949
## iter  30 value 38.778489
## iter  40 value 15.769361
## iter  50 value 9.505712
## iter  60 value 7.385170
## iter  70 value 6.016340
## iter  80 value 4.936752
## iter  90 value 4.374806
## iter 100 value 3.887038
## final  value 3.887038 
## stopped after 100 iterations
## - Fold01: decay=1e-03, size=15 
## + Fold01: decay=1e-04, size=15 
## # weights:  706
## initial  value 827.166912 
## iter  10 value 470.013768
## iter  20 value 198.789223
## iter  30 value 86.101937
## iter  40 value 47.527365
## iter  50 value 36.710592
## iter  60 value 28.605849
## iter  70 value 21.442787
## iter  80 value 14.324677
## iter  90 value 8.761229
## iter 100 value 7.035885
## final  value 7.035885 
## stopped after 100 iterations
## - Fold01: decay=1e-04, size=15 
## + Fold02: decay=1e-01, size= 5 
## # weights:  236
## initial  value 1043.211177 
## iter  10 value 515.573498
## iter  20 value 446.813067
## iter  30 value 416.432074
## iter  40 value 395.319084
## iter  50 value 374.620108
## iter  60 value 357.470458
## iter  70 value 340.900677
## iter  80 value 325.380772
## iter  90 value 318.821615
## iter 100 value 314.361791
## final  value 314.361791 
## stopped after 100 iterations
## - Fold02: decay=1e-01, size= 5 
## + Fold02: decay=1e-03, size= 5 
## # weights:  236
## initial  value 1176.899576 
## iter  10 value 507.096947
## iter  20 value 412.573101
## iter  30 value 343.388533
## iter  40 value 308.041028
## iter  50 value 290.911700
## iter  60 value 283.151739
## iter  70 value 279.851370
## iter  80 value 277.202414
## iter  90 value 275.774269
## iter 100 value 274.759488
## final  value 274.759488 
## stopped after 100 iterations
## - Fold02: decay=1e-03, size= 5 
## + Fold02: decay=1e-04, size= 5 
## # weights:  236
## initial  value 1300.671245 
## iter  10 value 543.101698
## iter  20 value 450.864982
## iter  30 value 418.136023
## iter  40 value 389.581099
## iter  50 value 371.387012
## iter  60 value 354.064125
## iter  70 value 341.919932
## iter  80 value 334.852003
## iter  90 value 332.125116
## iter 100 value 331.438012
## final  value 331.438012 
## stopped after 100 iterations
## - Fold02: decay=1e-04, size= 5 
## + Fold02: decay=1e-01, size=10 
## # weights:  471
## initial  value 1631.262554 
## iter  10 value 494.845614
## iter  20 value 365.585726
## iter  30 value 265.569986
## iter  40 value 219.137663
## iter  50 value 186.449026
## iter  60 value 171.148282
## iter  70 value 157.612267
## iter  80 value 149.680207
## iter  90 value 143.803729
## iter 100 value 138.853099
## final  value 138.853099 
## stopped after 100 iterations
## - Fold02: decay=1e-01, size=10 
## + Fold02: decay=1e-03, size=10 
## # weights:  471
## initial  value 1217.561916 
## iter  10 value 460.615453
## iter  20 value 229.099452
## iter  30 value 114.618383
## iter  40 value 83.742880
## iter  50 value 62.968557
## iter  60 value 55.642506
## iter  70 value 50.757025
## iter  80 value 48.855383
## iter  90 value 43.804241
## iter 100 value 42.512001
## final  value 42.512001 
## stopped after 100 iterations
## - Fold02: decay=1e-03, size=10 
## + Fold02: decay=1e-04, size=10 
## # weights:  471
## initial  value 1191.049394 
## iter  10 value 480.100664
## iter  20 value 229.029988
## iter  30 value 104.628957
## iter  40 value 74.886190
## iter  50 value 63.743215
## iter  60 value 58.632012
## iter  70 value 53.814768
## iter  80 value 51.531101
## iter  90 value 44.647777
## iter 100 value 39.957833
## final  value 39.957833 
## stopped after 100 iterations
## - Fold02: decay=1e-04, size=10 
## + Fold02: decay=1e-01, size=15 
## # weights:  706
## initial  value 1207.041842 
## iter  10 value 482.281660
## iter  20 value 265.246466
## iter  30 value 174.168636
## iter  40 value 135.762286
## iter  50 value 119.891952
## iter  60 value 110.571789
## iter  70 value 105.511290
## iter  80 value 102.189788
## iter  90 value 99.536209
## iter 100 value 96.482190
## final  value 96.482190 
## stopped after 100 iterations
## - Fold02: decay=1e-01, size=15 
## + Fold02: decay=1e-03, size=15 
## # weights:  706
## initial  value 862.737875 
## iter  10 value 444.104682
## iter  20 value 117.804309
## iter  30 value 29.730029
## iter  40 value 16.804065
## iter  50 value 12.154179
## iter  60 value 8.771459
## iter  70 value 7.040660
## iter  80 value 5.762122
## iter  90 value 4.872508
## iter 100 value 4.329584
## final  value 4.329584 
## stopped after 100 iterations
## - Fold02: decay=1e-03, size=15 
## + Fold02: decay=1e-04, size=15 
## # weights:  706
## initial  value 851.493266 
## iter  10 value 400.768220
## iter  20 value 104.222110
## iter  30 value 28.931311
## iter  40 value 10.220303
## iter  50 value 4.951407
## iter  60 value 3.316251
## iter  70 value 3.039429
## iter  80 value 1.910038
## iter  90 value 1.670032
## iter 100 value 1.493379
## final  value 1.493379 
## stopped after 100 iterations
## - Fold02: decay=1e-04, size=15 
## + Fold03: decay=1e-01, size= 5 
## # weights:  236
## initial  value 979.039355 
## iter  10 value 564.194829
## iter  20 value 425.505426
## iter  30 value 367.525480
## iter  40 value 338.932216
## iter  50 value 328.932006
## iter  60 value 316.796977
## iter  70 value 307.248145
## iter  80 value 299.075733
## iter  90 value 292.821402
## iter 100 value 288.808794
## final  value 288.808794 
## stopped after 100 iterations
## - Fold03: decay=1e-01, size= 5 
## + Fold03: decay=1e-03, size= 5 
## # weights:  236
## initial  value 986.375221 
## iter  10 value 523.384737
## iter  20 value 407.001328
## iter  30 value 331.990144
## iter  40 value 296.866820
## iter  50 value 281.469840
## iter  60 value 261.828923
## iter  70 value 255.122101
## iter  80 value 249.201428
## iter  90 value 244.783964
## iter 100 value 243.363017
## final  value 243.363017 
## stopped after 100 iterations
## - Fold03: decay=1e-03, size= 5 
## + Fold03: decay=1e-04, size= 5 
## # weights:  236
## initial  value 1461.755620 
## iter  10 value 479.071834
## iter  20 value 363.421246
## iter  30 value 296.253752
## iter  40 value 267.447185
## iter  50 value 251.915814
## iter  60 value 240.299516
## iter  70 value 235.300580
## iter  80 value 232.230290
## iter  90 value 230.159671
## iter 100 value 227.718893
## final  value 227.718893 
## stopped after 100 iterations
## - Fold03: decay=1e-04, size= 5 
## + Fold03: decay=1e-01, size=10 
## # weights:  471
## initial  value 1084.300512 
## iter  10 value 477.830416
## iter  20 value 312.558768
## iter  30 value 234.440715
## iter  40 value 196.925048
## iter  50 value 172.518026
## iter  60 value 155.573017
## iter  70 value 145.065066
## iter  80 value 138.128464
## iter  90 value 133.284485
## iter 100 value 129.247486
## final  value 129.247486 
## stopped after 100 iterations
## - Fold03: decay=1e-01, size=10 
## + Fold03: decay=1e-03, size=10 
## # weights:  471
## initial  value 910.269329 
## iter  10 value 434.220965
## iter  20 value 213.631889
## iter  30 value 124.684018
## iter  40 value 95.463833
## iter  50 value 77.743512
## iter  60 value 69.072967
## iter  70 value 63.972036
## iter  80 value 60.700226
## iter  90 value 56.972851
## iter 100 value 53.402217
## final  value 53.402217 
## stopped after 100 iterations
## - Fold03: decay=1e-03, size=10 
## + Fold03: decay=1e-04, size=10 
## # weights:  471
## initial  value 1656.628105 
## iter  10 value 474.668103
## iter  20 value 264.433722
## iter  30 value 139.668538
## iter  40 value 85.729861
## iter  50 value 73.272712
## iter  60 value 64.521542
## iter  70 value 56.359446
## iter  80 value 50.699759
## iter  90 value 47.776614
## iter 100 value 46.014379
## final  value 46.014379 
## stopped after 100 iterations
## - Fold03: decay=1e-04, size=10 
## + Fold03: decay=1e-01, size=15 
## # weights:  706
## initial  value 2401.668417 
## iter  10 value 551.807095
## iter  20 value 308.251020
## iter  30 value 196.760479
## iter  40 value 147.587524
## iter  50 value 122.213931
## iter  60 value 111.900963
## iter  70 value 107.768864
## iter  80 value 104.355440
## iter  90 value 99.913696
## iter 100 value 96.585430
## final  value 96.585430 
## stopped after 100 iterations
## - Fold03: decay=1e-01, size=15 
## + Fold03: decay=1e-03, size=15 
## # weights:  706
## initial  value 2084.615580 
## iter  10 value 430.329929
## iter  20 value 130.852672
## iter  30 value 51.896164
## iter  40 value 28.489079
## iter  50 value 15.883562
## iter  60 value 11.306570
## iter  70 value 9.484360
## iter  80 value 7.721709
## iter  90 value 6.530884
## iter 100 value 5.616876
## final  value 5.616876 
## stopped after 100 iterations
## - Fold03: decay=1e-03, size=15 
## + Fold03: decay=1e-04, size=15 
## # weights:  706
## initial  value 1985.356918 
## iter  10 value 461.934223
## iter  20 value 155.451193
## iter  30 value 45.179288
## iter  40 value 16.613076
## iter  50 value 6.673749
## iter  60 value 3.811190
## iter  70 value 2.996127
## iter  80 value 2.553329
## iter  90 value 2.333781
## iter 100 value 2.219377
## final  value 2.219377 
## stopped after 100 iterations
## - Fold03: decay=1e-04, size=15 
## + Fold04: decay=1e-01, size= 5 
## # weights:  236
## initial  value 1102.524147 
## iter  10 value 559.641870
## iter  20 value 489.719227
## iter  30 value 445.484948
## iter  40 value 420.080187
## iter  50 value 401.990772
## iter  60 value 383.993240
## iter  70 value 371.981343
## iter  80 value 365.289114
## iter  90 value 360.198795
## iter 100 value 355.943091
## final  value 355.943091 
## stopped after 100 iterations
## - Fold04: decay=1e-01, size= 5 
## + Fold04: decay=1e-03, size= 5 
## # weights:  236
## initial  value 1958.096930 
## iter  10 value 512.262447
## iter  20 value 380.881264
## iter  30 value 297.696889
## iter  40 value 250.520364
## iter  50 value 228.903053
## iter  60 value 214.483318
## iter  70 value 209.460313
## iter  80 value 206.103456
## iter  90 value 204.095760
## iter 100 value 202.805263
## final  value 202.805263 
## stopped after 100 iterations
## - Fold04: decay=1e-03, size= 5 
## + Fold04: decay=1e-04, size= 5 
## # weights:  236
## initial  value 1498.345550 
## iter  10 value 513.166538
## iter  20 value 380.650609
## iter  30 value 315.107827
## iter  40 value 284.990617
## iter  50 value 264.892188
## iter  60 value 249.213242
## iter  70 value 241.328102
## iter  80 value 234.881357
## iter  90 value 231.068399
## iter 100 value 229.089325
## final  value 229.089325 
## stopped after 100 iterations
## - Fold04: decay=1e-04, size= 5 
## + Fold04: decay=1e-01, size=10 
## # weights:  471
## initial  value 1571.783217 
## iter  10 value 490.190704
## iter  20 value 292.981835
## iter  30 value 232.963398
## iter  40 value 191.599642
## iter  50 value 174.992136
## iter  60 value 160.282160
## iter  70 value 142.293377
## iter  80 value 133.576236
## iter  90 value 127.094027
## iter 100 value 122.094318
## final  value 122.094318 
## stopped after 100 iterations
## - Fold04: decay=1e-01, size=10 
## + Fold04: decay=1e-03, size=10 
## # weights:  471
## initial  value 1083.171777 
## iter  10 value 489.053515
## iter  20 value 287.366713
## iter  30 value 189.584059
## iter  40 value 151.375966
## iter  50 value 136.070412
## iter  60 value 125.107543
## iter  70 value 114.904041
## iter  80 value 92.179243
## iter  90 value 78.215852
## iter 100 value 78.150763
## final  value 78.150763 
## stopped after 100 iterations
## - Fold04: decay=1e-03, size=10 
## + Fold04: decay=1e-04, size=10 
## # weights:  471
## initial  value 2046.630969 
## iter  10 value 475.383272
## iter  20 value 238.584992
## iter  30 value 118.800734
## iter  40 value 64.457642
## iter  50 value 39.555501
## iter  60 value 29.258540
## iter  70 value 20.901592
## iter  80 value 16.825232
## iter  90 value 14.113326
## iter 100 value 12.661468
## final  value 12.661468 
## stopped after 100 iterations
## - Fold04: decay=1e-04, size=10 
## + Fold04: decay=1e-01, size=15 
## # weights:  706
## initial  value 1203.488890 
## iter  10 value 504.591368
## iter  20 value 280.956418
## iter  30 value 174.941413
## iter  40 value 135.039295
## iter  50 value 121.384919
## iter  60 value 113.742674
## iter  70 value 108.673223
## iter  80 value 104.650123
## iter  90 value 101.715412
## iter 100 value 100.095074
## final  value 100.095074 
## stopped after 100 iterations
## - Fold04: decay=1e-01, size=15 
## + Fold04: decay=1e-03, size=15 
## # weights:  706
## initial  value 2521.025638 
## iter  10 value 500.980166
## iter  20 value 159.592363
## iter  30 value 67.879661
## iter  40 value 36.506814
## iter  50 value 22.263078
## iter  60 value 15.477222
## iter  70 value 11.005566
## iter  80 value 9.066335
## iter  90 value 7.421173
## iter 100 value 6.190534
## final  value 6.190534 
## stopped after 100 iterations
## - Fold04: decay=1e-03, size=15 
## + Fold04: decay=1e-04, size=15 
## # weights:  706
## initial  value 1074.910923 
## iter  10 value 406.654517
## iter  20 value 142.845957
## iter  30 value 64.332100
## iter  40 value 23.709762
## iter  50 value 13.210177
## iter  60 value 5.728345
## iter  70 value 3.908078
## iter  80 value 3.095541
## iter  90 value 2.375277
## iter 100 value 2.023757
## final  value 2.023757 
## stopped after 100 iterations
## - Fold04: decay=1e-04, size=15 
## + Fold05: decay=1e-01, size= 5 
## # weights:  236
## initial  value 999.744064 
## iter  10 value 510.598750
## iter  20 value 416.109297
## iter  30 value 371.201557
## iter  40 value 326.768573
## iter  50 value 301.366335
## iter  60 value 286.547544
## iter  70 value 266.750914
## iter  80 value 253.523967
## iter  90 value 243.336055
## iter 100 value 235.996121
## final  value 235.996121 
## stopped after 100 iterations
## - Fold05: decay=1e-01, size= 5 
## + Fold05: decay=1e-03, size= 5 
## # weights:  236
## initial  value 2455.228898 
## iter  10 value 488.573065
## iter  20 value 352.279682
## iter  30 value 292.448642
## iter  40 value 266.309579
## iter  50 value 241.525120
## iter  60 value 226.011057
## iter  70 value 217.508863
## iter  80 value 211.922910
## iter  90 value 208.253484
## iter 100 value 204.809414
## final  value 204.809414 
## stopped after 100 iterations
## - Fold05: decay=1e-03, size= 5 
## + Fold05: decay=1e-04, size= 5 
## # weights:  236
## initial  value 2283.208332 
## iter  10 value 493.612163
## iter  20 value 385.458603
## iter  30 value 292.962837
## iter  40 value 252.179268
## iter  50 value 231.708186
## iter  60 value 218.749956
## iter  70 value 208.559994
## iter  80 value 201.425189
## iter  90 value 198.534976
## iter 100 value 197.187696
## final  value 197.187696 
## stopped after 100 iterations
## - Fold05: decay=1e-04, size= 5 
## + Fold05: decay=1e-01, size=10 
## # weights:  471
## initial  value 1199.691845 
## iter  10 value 562.676644
## iter  20 value 348.317810
## iter  30 value 265.935109
## iter  40 value 218.298188
## iter  50 value 187.982511
## iter  60 value 166.258973
## iter  70 value 147.692726
## iter  80 value 133.570791
## iter  90 value 124.766782
## iter 100 value 119.553411
## final  value 119.553411 
## stopped after 100 iterations
## - Fold05: decay=1e-01, size=10 
## + Fold05: decay=1e-03, size=10 
## # weights:  471
## initial  value 1184.501635 
## iter  10 value 425.991592
## iter  20 value 166.776447
## iter  30 value 93.455546
## iter  40 value 71.494787
## iter  50 value 57.909872
## iter  60 value 51.442464
## iter  70 value 48.936728
## iter  80 value 47.153961
## iter  90 value 44.960904
## iter 100 value 37.447237
## final  value 37.447237 
## stopped after 100 iterations
## - Fold05: decay=1e-03, size=10 
## + Fold05: decay=1e-04, size=10 
## # weights:  471
## initial  value 1608.314580 
## iter  10 value 467.405249
## iter  20 value 231.933927
## iter  30 value 117.960363
## iter  40 value 75.010960
## iter  50 value 54.435619
## iter  60 value 42.428009
## iter  70 value 36.639845
## iter  80 value 33.270977
## iter  90 value 30.774728
## iter 100 value 30.095309
## final  value 30.095309 
## stopped after 100 iterations
## - Fold05: decay=1e-04, size=10 
## + Fold05: decay=1e-01, size=15 
## # weights:  706
## initial  value 1282.491330 
## iter  10 value 484.082396
## iter  20 value 284.598863
## iter  30 value 192.147033
## iter  40 value 149.617723
## iter  50 value 128.418819
## iter  60 value 116.589927
## iter  70 value 107.602972
## iter  80 value 101.890338
## iter  90 value 97.552414
## iter 100 value 94.110653
## final  value 94.110653 
## stopped after 100 iterations
## - Fold05: decay=1e-01, size=15 
## + Fold05: decay=1e-03, size=15 
## # weights:  706
## initial  value 1097.688067 
## iter  10 value 453.546148
## iter  20 value 120.099925
## iter  30 value 25.197441
## iter  40 value 12.819999
## iter  50 value 9.539353
## iter  60 value 7.388810
## iter  70 value 5.862827
## iter  80 value 5.008636
## iter  90 value 4.353550
## iter 100 value 3.860578
## final  value 3.860578 
## stopped after 100 iterations
## - Fold05: decay=1e-03, size=15 
## + Fold05: decay=1e-04, size=15 
## # weights:  706
## initial  value 1548.751675 
## iter  10 value 455.642434
## iter  20 value 125.719176
## iter  30 value 46.989029
## iter  40 value 20.682825
## iter  50 value 7.742673
## iter  60 value 4.151117
## iter  70 value 3.156322
## iter  80 value 2.946517
## iter  90 value 2.733424
## iter 100 value 2.541698
## final  value 2.541698 
## stopped after 100 iterations
## - Fold05: decay=1e-04, size=15 
## + Fold06: decay=1e-01, size= 5 
## # weights:  236
## initial  value 1583.789158 
## iter  10 value 535.695127
## iter  20 value 443.880970
## iter  30 value 406.435108
## iter  40 value 388.476936
## iter  50 value 368.431935
## iter  60 value 352.055790
## iter  70 value 342.573933
## iter  80 value 335.882123
## iter  90 value 326.258708
## iter 100 value 317.850397
## final  value 317.850397 
## stopped after 100 iterations
## - Fold06: decay=1e-01, size= 5 
## + Fold06: decay=1e-03, size= 5 
## # weights:  236
## initial  value 2287.591953 
## iter  10 value 552.642530
## iter  20 value 403.270374
## iter  30 value 299.675070
## iter  40 value 255.486281
## iter  50 value 239.630003
## iter  60 value 232.446913
## iter  70 value 226.648733
## iter  80 value 222.336059
## iter  90 value 217.258351
## iter 100 value 213.895971
## final  value 213.895971 
## stopped after 100 iterations
## - Fold06: decay=1e-03, size= 5 
## + Fold06: decay=1e-04, size= 5 
## # weights:  236
## initial  value 1912.038974 
## iter  10 value 569.675842
## iter  20 value 467.079351
## iter  30 value 363.798622
## iter  40 value 284.031630
## iter  50 value 248.250223
## iter  60 value 230.464251
## iter  70 value 218.786003
## iter  80 value 206.422227
## iter  90 value 201.236932
## iter 100 value 189.577461
## final  value 189.577461 
## stopped after 100 iterations
## - Fold06: decay=1e-04, size= 5 
## + Fold06: decay=1e-01, size=10 
## # weights:  471
## initial  value 2014.126445 
## iter  10 value 518.566219
## iter  20 value 360.917063
## iter  30 value 274.545453
## iter  40 value 232.645034
## iter  50 value 203.549831
## iter  60 value 187.010577
## iter  70 value 170.323751
## iter  80 value 157.953049
## iter  90 value 149.092760
## iter 100 value 141.538161
## final  value 141.538161 
## stopped after 100 iterations
## - Fold06: decay=1e-01, size=10 
## + Fold06: decay=1e-03, size=10 
## # weights:  471
## initial  value 1117.089398 
## iter  10 value 492.229508
## iter  20 value 253.649670
## iter  30 value 150.769460
## iter  40 value 103.709109
## iter  50 value 82.769764
## iter  60 value 71.612436
## iter  70 value 66.421362
## iter  80 value 62.090807
## iter  90 value 58.868617
## iter 100 value 52.785876
## final  value 52.785876 
## stopped after 100 iterations
## - Fold06: decay=1e-03, size=10 
## + Fold06: decay=1e-04, size=10 
## # weights:  471
## initial  value 2131.287177 
## iter  10 value 473.011544
## iter  20 value 280.388189
## iter  30 value 189.158715
## iter  40 value 138.172577
## iter  50 value 104.339895
## iter  60 value 89.592398
## iter  70 value 82.161205
## iter  80 value 80.741055
## iter  90 value 80.255732
## iter 100 value 78.897861
## final  value 78.897861 
## stopped after 100 iterations
## - Fold06: decay=1e-04, size=10 
## + Fold06: decay=1e-01, size=15 
## # weights:  706
## initial  value 2138.447337 
## iter  10 value 585.433142
## iter  20 value 296.770383
## iter  30 value 198.417778
## iter  40 value 150.129549
## iter  50 value 129.259013
## iter  60 value 117.873792
## iter  70 value 110.487188
## iter  80 value 105.710451
## iter  90 value 101.818272
## iter 100 value 98.810860
## final  value 98.810860 
## stopped after 100 iterations
## - Fold06: decay=1e-01, size=15 
## + Fold06: decay=1e-03, size=15 
## # weights:  706
## initial  value 1624.415020 
## iter  10 value 463.948339
## iter  20 value 117.373949
## iter  30 value 33.452305
## iter  40 value 16.285958
## iter  50 value 9.116636
## iter  60 value 7.325397
## iter  70 value 6.012745
## iter  80 value 4.947838
## iter  90 value 4.144322
## iter 100 value 3.586657
## final  value 3.586657 
## stopped after 100 iterations
## - Fold06: decay=1e-03, size=15 
## + Fold06: decay=1e-04, size=15 
## # weights:  706
## initial  value 1868.880902 
## iter  10 value 442.365127
## iter  20 value 147.321634
## iter  30 value 45.104914
## iter  40 value 13.482532
## iter  50 value 4.947180
## iter  60 value 2.120399
## iter  70 value 1.842822
## iter  80 value 1.597732
## iter  90 value 1.422724
## iter 100 value 1.264040
## final  value 1.264040 
## stopped after 100 iterations
## - Fold06: decay=1e-04, size=15 
## + Fold07: decay=1e-01, size= 5 
## # weights:  236
## initial  value 2194.205215 
## iter  10 value 592.172697
## iter  20 value 473.166212
## iter  30 value 410.894740
## iter  40 value 364.791234
## iter  50 value 340.834339
## iter  60 value 325.374640
## iter  70 value 318.137759
## iter  80 value 314.333194
## iter  90 value 312.779091
## iter 100 value 299.130734
## final  value 299.130734 
## stopped after 100 iterations
## - Fold07: decay=1e-01, size= 5 
## + Fold07: decay=1e-03, size= 5 
## # weights:  236
## initial  value 1453.566697 
## iter  10 value 459.040174
## iter  20 value 346.771065
## iter  30 value 295.578524
## iter  40 value 272.188024
## iter  50 value 260.914353
## iter  60 value 254.498040
## iter  70 value 249.696028
## iter  80 value 247.604324
## iter  90 value 245.577689
## iter 100 value 244.823306
## final  value 244.823306 
## stopped after 100 iterations
## - Fold07: decay=1e-03, size= 5 
## + Fold07: decay=1e-04, size= 5 
## # weights:  236
## initial  value 1663.534311 
## iter  10 value 531.966373
## iter  20 value 363.623665
## iter  30 value 317.319318
## iter  40 value 289.910277
## iter  50 value 270.328256
## iter  60 value 258.972327
## iter  70 value 255.732451
## iter  80 value 251.018708
## iter  90 value 244.127095
## iter 100 value 239.099429
## final  value 239.099429 
## stopped after 100 iterations
## - Fold07: decay=1e-04, size= 5 
## + Fold07: decay=1e-01, size=10 
## # weights:  471
## initial  value 1032.230770 
## iter  10 value 472.703407
## iter  20 value 295.513006
## iter  30 value 228.638621
## iter  40 value 181.207725
## iter  50 value 160.526257
## iter  60 value 147.020135
## iter  70 value 131.712663
## iter  80 value 124.739610
## iter  90 value 120.197868
## iter 100 value 117.099920
## final  value 117.099920 
## stopped after 100 iterations
## - Fold07: decay=1e-01, size=10 
## + Fold07: decay=1e-03, size=10 
## # weights:  471
## initial  value 1261.930132 
## iter  10 value 452.285788
## iter  20 value 165.031790
## iter  30 value 85.859000
## iter  40 value 57.950519
## iter  50 value 38.527798
## iter  60 value 29.915119
## iter  70 value 27.152432
## iter  80 value 25.122913
## iter  90 value 19.330671
## iter 100 value 15.973848
## final  value 15.973848 
## stopped after 100 iterations
## - Fold07: decay=1e-03, size=10 
## + Fold07: decay=1e-04, size=10 
## # weights:  471
## initial  value 1029.970147 
## iter  10 value 441.694844
## iter  20 value 182.553311
## iter  30 value 87.873523
## iter  40 value 61.538696
## iter  50 value 46.524769
## iter  60 value 35.109007
## iter  70 value 30.370848
## iter  80 value 28.298541
## iter  90 value 27.652763
## iter 100 value 26.655171
## final  value 26.655171 
## stopped after 100 iterations
## - Fold07: decay=1e-04, size=10 
## + Fold07: decay=1e-01, size=15 
## # weights:  706
## initial  value 1667.737499 
## iter  10 value 513.163277
## iter  20 value 289.884235
## iter  30 value 186.880137
## iter  40 value 142.185764
## iter  50 value 119.909183
## iter  60 value 109.672640
## iter  70 value 103.664236
## iter  80 value 99.609724
## iter  90 value 96.549164
## iter 100 value 94.649444
## final  value 94.649444 
## stopped after 100 iterations
## - Fold07: decay=1e-01, size=15 
## + Fold07: decay=1e-03, size=15 
## # weights:  706
## initial  value 1403.373388 
## iter  10 value 409.675346
## iter  20 value 89.552397
## iter  30 value 24.986954
## iter  40 value 10.079620
## iter  50 value 7.222370
## iter  60 value 6.230957
## iter  70 value 5.270230
## iter  80 value 4.715050
## iter  90 value 4.273440
## iter 100 value 4.011490
## final  value 4.011490 
## stopped after 100 iterations
## - Fold07: decay=1e-03, size=15 
## + Fold07: decay=1e-04, size=15 
## # weights:  706
## initial  value 1043.171346 
## iter  10 value 482.458432
## iter  20 value 172.244050
## iter  30 value 66.936824
## iter  40 value 26.451228
## iter  50 value 12.481101
## iter  60 value 5.934767
## iter  70 value 3.336499
## iter  80 value 2.916512
## iter  90 value 2.672404
## iter 100 value 2.441288
## final  value 2.441288 
## stopped after 100 iterations
## - Fold07: decay=1e-04, size=15 
## + Fold08: decay=1e-01, size= 5 
## # weights:  236
## initial  value 1246.894349 
## iter  10 value 553.948795
## iter  20 value 440.670574
## iter  30 value 395.362044
## iter  40 value 351.343085
## iter  50 value 330.434809
## iter  60 value 320.902252
## iter  70 value 313.789953
## iter  80 value 305.853042
## iter  90 value 298.605079
## iter 100 value 294.375636
## final  value 294.375636 
## stopped after 100 iterations
## - Fold08: decay=1e-01, size= 5 
## + Fold08: decay=1e-03, size= 5 
## # weights:  236
## initial  value 953.262085 
## iter  10 value 478.509253
## iter  20 value 357.968491
## iter  30 value 309.549476
## iter  40 value 282.512473
## iter  50 value 264.847103
## iter  60 value 257.123059
## iter  70 value 252.130520
## iter  80 value 246.490364
## iter  90 value 242.905527
## iter 100 value 236.552483
## final  value 236.552483 
## stopped after 100 iterations
## - Fold08: decay=1e-03, size= 5 
## + Fold08: decay=1e-04, size= 5 
## # weights:  236
## initial  value 1463.125083 
## iter  10 value 577.232297
## iter  20 value 424.054815
## iter  30 value 338.837726
## iter  40 value 304.512536
## iter  50 value 283.830840
## iter  60 value 267.717149
## iter  70 value 263.083008
## iter  80 value 258.398175
## iter  90 value 256.770933
## iter 100 value 255.366882
## final  value 255.366882 
## stopped after 100 iterations
## - Fold08: decay=1e-04, size= 5 
## + Fold08: decay=1e-01, size=10 
## # weights:  471
## initial  value 1025.493913 
## iter  10 value 505.713989
## iter  20 value 339.237835
## iter  30 value 268.095218
## iter  40 value 233.094773
## iter  50 value 208.836011
## iter  60 value 191.339466
## iter  70 value 175.801053
## iter  80 value 159.975857
## iter  90 value 143.890151
## iter 100 value 135.460209
## final  value 135.460209 
## stopped after 100 iterations
## - Fold08: decay=1e-01, size=10 
## + Fold08: decay=1e-03, size=10 
## # weights:  471
## initial  value 2527.106152 
## iter  10 value 639.835917
## iter  20 value 507.877782
## iter  30 value 338.469663
## iter  40 value 235.585230
## iter  50 value 191.438239
## iter  60 value 173.232819
## iter  70 value 156.504154
## iter  80 value 142.030922
## iter  90 value 125.664559
## iter 100 value 110.871906
## final  value 110.871906 
## stopped after 100 iterations
## - Fold08: decay=1e-03, size=10 
## + Fold08: decay=1e-04, size=10 
## # weights:  471
## initial  value 1608.623397 
## iter  10 value 541.181167
## iter  20 value 229.701102
## iter  30 value 106.946843
## iter  40 value 59.427025
## iter  50 value 38.721386
## iter  60 value 26.369102
## iter  70 value 23.723935
## iter  80 value 23.170946
## iter  90 value 22.277685
## iter 100 value 21.165295
## final  value 21.165295 
## stopped after 100 iterations
## - Fold08: decay=1e-04, size=10 
## + Fold08: decay=1e-01, size=15 
## # weights:  706
## initial  value 2265.355479 
## iter  10 value 514.058954
## iter  20 value 294.498558
## iter  30 value 198.742930
## iter  40 value 149.635217
## iter  50 value 126.846364
## iter  60 value 114.760677
## iter  70 value 106.774939
## iter  80 value 101.270733
## iter  90 value 96.614884
## iter 100 value 93.173983
## final  value 93.173983 
## stopped after 100 iterations
## - Fold08: decay=1e-01, size=15 
## + Fold08: decay=1e-03, size=15 
## # weights:  706
## initial  value 1323.583844 
## iter  10 value 423.841355
## iter  20 value 108.721159
## iter  30 value 25.712201
## iter  40 value 12.136976
## iter  50 value 9.941319
## iter  60 value 8.282659
## iter  70 value 6.825947
## iter  80 value 5.871230
## iter  90 value 5.259565
## iter 100 value 4.689273
## final  value 4.689273 
## stopped after 100 iterations
## - Fold08: decay=1e-03, size=15 
## + Fold08: decay=1e-04, size=15 
## # weights:  706
## initial  value 905.992717 
## iter  10 value 455.270897
## iter  20 value 103.134631
## iter  30 value 20.406571
## iter  40 value 5.843774
## iter  50 value 1.896298
## iter  60 value 1.459136
## iter  70 value 1.354067
## iter  80 value 1.235575
## iter  90 value 1.144190
## iter 100 value 1.054098
## final  value 1.054098 
## stopped after 100 iterations
## - Fold08: decay=1e-04, size=15 
## + Fold09: decay=1e-01, size= 5 
## # weights:  236
## initial  value 862.156124 
## iter  10 value 538.241448
## iter  20 value 430.575776
## iter  30 value 369.149183
## iter  40 value 324.166724
## iter  50 value 302.797444
## iter  60 value 285.646834
## iter  70 value 270.919667
## iter  80 value 257.996099
## iter  90 value 248.124620
## iter 100 value 244.699379
## final  value 244.699379 
## stopped after 100 iterations
## - Fold09: decay=1e-01, size= 5 
## + Fold09: decay=1e-03, size= 5 
## # weights:  236
## initial  value 1610.349070 
## iter  10 value 499.744664
## iter  20 value 385.782579
## iter  30 value 343.252870
## iter  40 value 314.902578
## iter  50 value 305.123880
## iter  60 value 297.457679
## iter  70 value 290.390649
## iter  80 value 288.465321
## iter  90 value 286.357373
## iter 100 value 284.687559
## final  value 284.687559 
## stopped after 100 iterations
## - Fold09: decay=1e-03, size= 5 
## + Fold09: decay=1e-04, size= 5 
## # weights:  236
## initial  value 1406.441393 
## iter  10 value 484.070229
## iter  20 value 365.007027
## iter  30 value 294.661720
## iter  40 value 249.832363
## iter  50 value 228.143805
## iter  60 value 217.809324
## iter  70 value 211.909080
## iter  80 value 208.353382
## iter  90 value 207.031061
## iter 100 value 206.533841
## final  value 206.533841 
## stopped after 100 iterations
## - Fold09: decay=1e-04, size= 5 
## + Fold09: decay=1e-01, size=10 
## # weights:  471
## initial  value 1623.215072 
## iter  10 value 500.683179
## iter  20 value 321.082144
## iter  30 value 244.609945
## iter  40 value 191.891600
## iter  50 value 167.432927
## iter  60 value 154.097278
## iter  70 value 145.313940
## iter  80 value 137.279062
## iter  90 value 130.821596
## iter 100 value 124.234682
## final  value 124.234682 
## stopped after 100 iterations
## - Fold09: decay=1e-01, size=10 
## + Fold09: decay=1e-03, size=10 
## # weights:  471
## initial  value 2273.155144 
## iter  10 value 503.371595
## iter  20 value 248.567688
## iter  30 value 150.608171
## iter  40 value 118.004326
## iter  50 value 89.488824
## iter  60 value 81.463784
## iter  70 value 73.780747
## iter  80 value 69.580463
## iter  90 value 65.073285
## iter 100 value 59.779447
## final  value 59.779447 
## stopped after 100 iterations
## - Fold09: decay=1e-03, size=10 
## + Fold09: decay=1e-04, size=10 
## # weights:  471
## initial  value 955.219901 
## iter  10 value 470.412730
## iter  20 value 214.355623
## iter  30 value 102.740109
## iter  40 value 63.228214
## iter  50 value 49.289471
## iter  60 value 40.551725
## iter  70 value 33.342558
## iter  80 value 31.008720
## iter  90 value 28.942357
## iter 100 value 26.362076
## final  value 26.362076 
## stopped after 100 iterations
## - Fold09: decay=1e-04, size=10 
## + Fold09: decay=1e-01, size=15 
## # weights:  706
## initial  value 1727.430886 
## iter  10 value 528.142688
## iter  20 value 285.949150
## iter  30 value 170.878610
## iter  40 value 132.311818
## iter  50 value 118.343578
## iter  60 value 108.363103
## iter  70 value 101.974091
## iter  80 value 98.671418
## iter  90 value 96.697177
## iter 100 value 95.115663
## final  value 95.115663 
## stopped after 100 iterations
## - Fold09: decay=1e-01, size=15 
## + Fold09: decay=1e-03, size=15 
## # weights:  706
## initial  value 1337.025383 
## iter  10 value 435.360641
## iter  20 value 175.973571
## iter  30 value 64.359704
## iter  40 value 29.123220
## iter  50 value 20.541539
## iter  60 value 15.346945
## iter  70 value 12.724346
## iter  80 value 10.892238
## iter  90 value 9.642380
## iter 100 value 8.772000
## final  value 8.772000 
## stopped after 100 iterations
## - Fold09: decay=1e-03, size=15 
## + Fold09: decay=1e-04, size=15 
## # weights:  706
## initial  value 3695.357409 
## iter  10 value 424.136298
## iter  20 value 144.964391
## iter  30 value 58.290401
## iter  40 value 19.326504
## iter  50 value 7.408809
## iter  60 value 3.003654
## iter  70 value 2.214451
## iter  80 value 1.988821
## iter  90 value 1.802434
## iter 100 value 1.663191
## final  value 1.663191 
## stopped after 100 iterations
## - Fold09: decay=1e-04, size=15 
## + Fold10: decay=1e-01, size= 5 
## # weights:  236
## initial  value 2113.230449 
## iter  10 value 537.905411
## iter  20 value 445.966202
## iter  30 value 397.171824
## iter  40 value 361.929433
## iter  50 value 345.292124
## iter  60 value 331.142136
## iter  70 value 313.601017
## iter  80 value 300.908354
## iter  90 value 292.819812
## iter 100 value 284.099386
## final  value 284.099386 
## stopped after 100 iterations
## - Fold10: decay=1e-01, size= 5 
## + Fold10: decay=1e-03, size= 5 
## # weights:  236
## initial  value 1596.954882 
## iter  10 value 498.462475
## iter  20 value 373.977820
## iter  30 value 310.621706
## iter  40 value 266.433637
## iter  50 value 245.184234
## iter  60 value 232.910965
## iter  70 value 226.479351
## iter  80 value 222.821409
## iter  90 value 221.700108
## iter 100 value 221.335210
## final  value 221.335210 
## stopped after 100 iterations
## - Fold10: decay=1e-03, size= 5 
## + Fold10: decay=1e-04, size= 5 
## # weights:  236
## initial  value 1065.565561 
## iter  10 value 517.863538
## iter  20 value 365.424652
## iter  30 value 306.322554
## iter  40 value 285.453606
## iter  50 value 272.378162
## iter  60 value 262.069910
## iter  70 value 255.154747
## iter  80 value 251.763118
## iter  90 value 250.534672
## iter 100 value 249.897334
## final  value 249.897334 
## stopped after 100 iterations
## - Fold10: decay=1e-04, size= 5 
## + Fold10: decay=1e-01, size=10 
## # weights:  471
## initial  value 2254.813392 
## iter  10 value 513.286152
## iter  20 value 361.125589
## iter  30 value 275.197721
## iter  40 value 224.873087
## iter  50 value 194.433842
## iter  60 value 175.209013
## iter  70 value 159.682478
## iter  80 value 150.327664
## iter  90 value 143.192834
## iter 100 value 137.328453
## final  value 137.328453 
## stopped after 100 iterations
## - Fold10: decay=1e-01, size=10 
## + Fold10: decay=1e-03, size=10 
## # weights:  471
## initial  value 1008.178691 
## iter  10 value 432.980312
## iter  20 value 204.280126
## iter  30 value 126.107780
## iter  40 value 104.041066
## iter  50 value 87.968478
## iter  60 value 75.447524
## iter  70 value 70.711279
## iter  80 value 67.385722
## iter  90 value 63.852615
## iter 100 value 55.274948
## final  value 55.274948 
## stopped after 100 iterations
## - Fold10: decay=1e-03, size=10 
## + Fold10: decay=1e-04, size=10 
## # weights:  471
## initial  value 1101.790737 
## iter  10 value 445.560267
## iter  20 value 245.248779
## iter  30 value 126.110531
## iter  40 value 87.726106
## iter  50 value 64.956253
## iter  60 value 57.264500
## iter  70 value 52.365549
## iter  80 value 46.355092
## iter  90 value 42.476742
## iter 100 value 40.430584
## final  value 40.430584 
## stopped after 100 iterations
## - Fold10: decay=1e-04, size=10 
## + Fold10: decay=1e-01, size=15 
## # weights:  706
## initial  value 866.473120 
## iter  10 value 465.791478
## iter  20 value 285.918207
## iter  30 value 198.953773
## iter  40 value 140.019519
## iter  50 value 114.931733
## iter  60 value 101.341437
## iter  70 value 95.267505
## iter  80 value 91.425853
## iter  90 value 88.352728
## iter 100 value 85.855699
## final  value 85.855699 
## stopped after 100 iterations
## - Fold10: decay=1e-01, size=15 
## + Fold10: decay=1e-03, size=15 
## # weights:  706
## initial  value 882.340598 
## iter  10 value 403.987152
## iter  20 value 90.213184
## iter  30 value 28.740914
## iter  40 value 13.097514
## iter  50 value 9.580189
## iter  60 value 7.725858
## iter  70 value 6.026861
## iter  80 value 5.229900
## iter  90 value 4.614470
## iter 100 value 4.131511
## final  value 4.131511 
## stopped after 100 iterations
## - Fold10: decay=1e-03, size=15 
## + Fold10: decay=1e-04, size=15 
## # weights:  706
## initial  value 937.898038 
## iter  10 value 440.524045
## iter  20 value 106.035907
## iter  30 value 20.507718
## iter  40 value 5.445012
## iter  50 value 1.551716
## iter  60 value 1.210146
## iter  70 value 1.109653
## iter  80 value 1.024035
## iter  90 value 0.935546
## iter 100 value 0.864462
## final  value 0.864462 
## stopped after 100 iterations
## - Fold10: decay=1e-04, size=15 
## Aggregating results
## Selecting tuning parameters
## Fitting size = 15, decay = 0.001 on full training set
## # weights:  706
## initial  value 1715.773447 
## iter  10 value 471.833836
## iter  20 value 109.411556
## iter  30 value 26.839104
## iter  40 value 12.232679
## iter  50 value 9.025636
## iter  60 value 7.296955
## iter  70 value 5.983752
## iter  80 value 5.105445
## iter  90 value 4.457610
## iter 100 value 4.033468
## final  value 4.033468 
## stopped after 100 iterations

Model results

print(nn_model)
## Neural Network 
## 
## 2059 samples
##   31 predictor
##    2 classes: 'No', 'Yes' 
## 
## Pre-processing: centered (45), scaled (45) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 1853, 1853, 1852, 1854, 1853, 1853, ... 
## Resampling results across tuning parameters:
## 
##   decay  size  ROC        Sens       Spec     
##   1e-04   5    0.8959274  0.9426872  0.6839572
##   1e-04  10    0.9173276  0.9507562  0.7588235
##   1e-04  15    0.9083198  0.9542277  0.7860963
##   1e-03   5    0.8955856  0.9501714  0.6444742
##   1e-03  10    0.9232351  0.9461151  0.8011586
##   1e-03  15    0.9308616  0.9629352  0.8494652
##   1e-01   5    0.9030245  0.9467133  0.6417112
##   1e-01  10    0.9286096  0.9664101  0.7889483
##   1e-01  15    0.9281599  0.9612045  0.7861854
## 
## ROC was used to select the optimal model using the largest value.
## The final values used for the model were size = 15 and decay = 0.001.
plot(nn_model)
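
The selected hyper-parameters and their resampled performance can also be pulled straight out of the train object; getTrainPerf() is a caret helper that reports the mean ROC, sensitivity and specificity of the chosen model.

nn_model$bestTune       # size = 15, decay = 0.001
getTrainPerf(nn_model)  # resampled ROC / Sens / Spec of the selected model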

Variable importance

varImp(nn_model)
## nnet variable importance
## 
##   only 20 most important variables shown (out of 45)
## 
##                                 Overall
## DailyRate                        100.00
## Age                               96.65
## OverTimeYes                       95.69
## HourlyRate                        94.31
## JobInvolvement                    85.32
## GenderMale                        79.70
## JobSatisfaction                   75.66
## WorkLifeBalance                   75.60
## NumCompaniesWorked                75.60
## MaritalStatusMarried              73.56
## DistanceFromHome                  72.43
## YearsInCurrentRole                71.29
## JobRoleManufacturing Director     69.84
## EducationFieldMarketing           69.83
## YearsSinceLastPromotion           66.05
## StockOptionLevel                  64.95
## BusinessTravelTravel_Frequently   63.10
## TrainingTimesLastYear             61.82
## TotalWorkingYears                 60.92
## RelationshipSatisfaction          59.23
plot(varImp(nn_model))

Measuring Model Performance
Prediction on Test data

test.pred <- predict(nn_model, newdata=test.data)
test.confusion.m <- confusionMatrix(test.pred, test.data$Attrition)
test.confusion.m
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  No Yes
##        No  711  28
##        Yes  28 114
##                                           
##                Accuracy : 0.9364          
##                  95% CI : (0.9182, 0.9516)
##     No Information Rate : 0.8388          
##     P-Value [Acc > NIR] : <2e-16          
##                                           
##                   Kappa : 0.7649          
##  Mcnemar's Test P-Value : 1               
##                                           
##             Sensitivity : 0.9621          
##             Specificity : 0.8028          
##          Pos Pred Value : 0.9621          
##          Neg Pred Value : 0.8028          
##              Prevalence : 0.8388          
##          Detection Rate : 0.8070          
##    Detection Prevalence : 0.8388          
##       Balanced Accuracy : 0.8825          
##                                           
##        'Positive' Class : No              
## 
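
As a quick hand-check of the headline numbers, the same statistics can be recomputed from the confusion-matrix table itself (rows are predictions, columns are the reference labels, positive class = "No").

cm <- test.confusion.m$table
sum(diag(cm)) / sum(cm)               # accuracy, ~0.9364
cm["No", "No"] / sum(cm[, "No"])      # sensitivity, ~0.9621
cm["Yes", "Yes"] / sum(cm[, "Yes"])   # specificity, ~0.8028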

Append the predicted class and the class probabilities (scores) to the test data set

test.data$predict.class <-
  predict(nn_model, test.data, type = "raw")
test.data$predict.score <-
  predict(nn_model, test.data, type = "prob")
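
A quick look at the scores confirms that predict(..., type = "prob") returns one probability column per class; the deciling step below uses the second column, i.e. the probability of "Yes".

head(test.data$predict.score, 3)   # columns "No" and "Yes"; column 2 is P(Attrition = "Yes")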

Deciling function to build a rank-ordered matrix and assess how well the NN model separates responders from non-responders

# Deciling function: bucket a score vector into 10 groups based on its decile boundaries
decile <- function(x) {
  deciles <- vector(length = 10)
  for (i in seq(0.1, 1, 0.1)) {
    deciles[i * 10] <- quantile(x, i, na.rm = TRUE)
  }
  return(ifelse(x < deciles[1], 1,
          ifelse(x < deciles[2], 2,
           ifelse(x < deciles[3], 3,
            ifelse(x < deciles[4], 4,
             ifelse(x < deciles[5], 5,
              ifelse(x < deciles[6], 6,
               ifelse(x < deciles[7], 7,
                ifelse(x < deciles[8], 8,
                 ifelse(x < deciles[9], 9, 10))))))))))
}
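
For reference, the same bucketing can be written more compactly with findInterval(); this sketch should reproduce the decile() assignments above for scores without missing values, but the longer version is what is actually used below.

decile_alt <- function(x) {
  # 9 decile boundaries; findInterval() returns 0-9, so add 1 to get buckets 1-10
  findInterval(x, quantile(x, probs = seq(0.1, 0.9, 0.1), na.rm = TRUE)) + 1
}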

Deciling

test.data$deciles <- decile(test.data$predict.score[, 2])

Ranking code

# work on a data.table copy of the scored test data and build the rank-ordered (decile) table and KS statistic
tmp_DT <- data.table(test.data)
rank <- tmp_DT[, list(
  cnt = length(Attrition),
  cnt_resp = sum(ifelse(Attrition == 'Yes', 1, 0)),
  cnt_non_resp = sum(ifelse(Attrition == 'No', 1, 0))
) ,
by = deciles][order(-deciles)]
rank$rrate <- round(rank$cnt_resp * 100 / rank$cnt, 2)

rank$cum_resp <- cumsum(rank$cnt_resp)
rank$cum_non_resp <- cumsum(rank$cnt_non_resp)
rank$cum_rel_resp <- round(rank$cum_resp / sum(rank$cnt_resp), 2)

rank$cum_rel_non_resp <- round(rank$cum_non_resp / sum(rank$cnt_non_resp), 2)

rank$ks <- abs(rank$cum_rel_resp - rank$cum_rel_non_resp)
rank
##    deciles cnt cnt_resp cnt_non_resp rrate cum_resp cum_non_resp
## 1:      10  89       77           12 86.52       77           12
## 2:       9  88       48           40 54.55      125           52
## 3:       8  87        5           82  5.75      130          134
## 4:       7  89        0           89  0.00      130          223
## 5:       6  88        2           86  2.27      132          309
## 6:       5 440       10          430  2.27      142          739
##    cum_rel_resp cum_rel_non_resp   ks
## 1:         0.54             0.02 0.52
## 2:         0.88             0.07 0.81
## 3:         0.92             0.18 0.74
## 4:         0.92             0.30 0.62
## 5:         0.93             0.42 0.51
## 6:         1.00             1.00 0.00
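
The KS statistic from this rank-ordered table is simply the largest gap between the cumulative response and non-response rates; here it sits near the top of the ranking.

max(rank$ks)   # ~0.81, reached by the second decile from the top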

Plot ROC and AUC Curve

# Compute predictions on the test data for the ROC/AUC analysis below
# Note: predict.train() returns predicted classes by default (type = "raw"), so 'prob' holds class labels, not probabilities
prob <- predict(nn_model, newdata = test.data)

rocCurve <- roc(response = test.data$Attrition,
                predictor = test.data$predict.score[, 2],
                levels = rev(levels(test.data$Attrition)))
auc(rocCurve)
## Area under the curve: 0.9329
ci.auc(rocCurve)
## 95% CI: 0.903-0.9628 (DeLong)
plot.roc(rocCurve)
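
pROC can also report the probability cut-off that maximizes sensitivity + specificity on this curve (Youden's J); this is an optional diagnostic, not used in the scoring above.

coords(rocCurve, "best", best.method = "youden")   # threshold, specificity, sensitivity at the optimal cut-off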

Plot true/false positive rates
Because prob contains hard class predictions (recoded to 0/1 below) rather than probabilities, the ROCR curve has a single operating point; its AUC works out to the balanced accuracy reported in the confusion matrix, and the KS value equals sensitivity + specificity - 1.

predvec <- ifelse(prob == "Yes", 1, 0)
realvec <- ifelse(test.data$Attrition == "Yes", 1, 0)
pr <- prediction(predvec, realvec)

pref <- performance(pr, "tpr", "fpr")
plot(pref)

KS <- max(attr(pref, 'y.values')[[1]] - attr(pref, 'x.values')[[1]])
KS
## [1] 0.7649279

AUC calculation (from the hard class predictions; see the note above)

auc <- performance(pr, measure = "auc")
auc <- auc@y.values[[1]]
auc
## [1] 0.8824639
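
For comparison, feeding the predicted probabilities (rather than the hard 0/1 classes) into ROCR should reproduce the probability-based AUC reported by pROC above (about 0.93); pr_prob is just an illustrative name.

pr_prob <- prediction(test.data$predict.score[, "Yes"], realvec)
performance(pr_prob, measure = "auc")@y.values[[1]]   # probability-based AUC, ~0.93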