## Step 1: Collecting data
# I found this dataset on Kaggle; it was created to identify a voice as male or female based on acoustic properties of the voice and speech. The dataset consists of 3,168 recorded voice samples collected from male and female speakers. The samples were pre-processed by acoustic analysis in R using the seewave and tuneR packages, with an analyzed frequency range of 0 Hz-280 Hz.
# I chose this dataset for classification analysis.
## Step 2: Exploring and preparing the data ----
# read in data and examine structure
rdata <- read.csv("voice.csv")
str(rdata)
## 'data.frame': 3168 obs. of 21 variables:
## $ meanfreq: num 0.0598 0.066 0.0773 0.1512 0.1351 ...
## $ sd : num 0.0642 0.0673 0.0838 0.0721 0.0791 ...
## $ median : num 0.032 0.0402 0.0367 0.158 0.1247 ...
## $ Q25 : num 0.0151 0.0194 0.0087 0.0966 0.0787 ...
## $ Q75 : num 0.0902 0.0927 0.1319 0.208 0.206 ...
## $ IQR : num 0.0751 0.0733 0.1232 0.1114 0.1273 ...
## $ skew : num 12.86 22.42 30.76 1.23 1.1 ...
## $ kurt : num 274.4 634.61 1024.93 4.18 4.33 ...
## $ sp.ent : num 0.893 0.892 0.846 0.963 0.972 ...
## $ sfm : num 0.492 0.514 0.479 0.727 0.784 ...
## $ mode : num 0 0 0 0.0839 0.1043 ...
## $ centroid: num 0.0598 0.066 0.0773 0.1512 0.1351 ...
## $ meanfun : num 0.0843 0.1079 0.0987 0.089 0.1064 ...
## $ minfun : num 0.0157 0.0158 0.0157 0.0178 0.0169 ...
## $ maxfun : num 0.276 0.25 0.271 0.25 0.267 ...
## $ meandom : num 0.00781 0.00901 0.00799 0.2015 0.71281 ...
## $ mindom : num 0.00781 0.00781 0.00781 0.00781 0.00781 ...
## $ maxdom : num 0.00781 0.05469 0.01562 0.5625 5.48438 ...
## $ dfrange : num 0 0.04688 0.00781 0.55469 5.47656 ...
## $ modindx : num 0 0.0526 0.0465 0.2471 0.2083 ...
## $ label : Factor w/ 2 levels "female","male": 2 2 2 2 2 2 2 2 2 2 ...
summary(rdata)
## meanfreq sd median Q25
## Min. :0.03936 Min. :0.01836 Min. :0.01097 Min. :0.0002288
## 1st Qu.:0.16366 1st Qu.:0.04195 1st Qu.:0.16959 1st Qu.:0.1110865
## Median :0.18484 Median :0.05916 Median :0.19003 Median :0.1402864
## Mean :0.18091 Mean :0.05713 Mean :0.18562 Mean :0.1404556
## 3rd Qu.:0.19915 3rd Qu.:0.06702 3rd Qu.:0.21062 3rd Qu.:0.1759388
## Max. :0.25112 Max. :0.11527 Max. :0.26122 Max. :0.2473469
## Q75 IQR skew kurt
## Min. :0.04295 Min. :0.01456 Min. : 0.1417 Min. : 2.068
## 1st Qu.:0.20875 1st Qu.:0.04256 1st Qu.: 1.6496 1st Qu.: 5.670
## Median :0.22568 Median :0.09428 Median : 2.1971 Median : 8.319
## Mean :0.22476 Mean :0.08431 Mean : 3.1402 Mean : 36.569
## 3rd Qu.:0.24366 3rd Qu.:0.11418 3rd Qu.: 2.9317 3rd Qu.: 13.649
## Max. :0.27347 Max. :0.25223 Max. :34.7255 Max. :1309.613
## sp.ent sfm mode centroid
## Min. :0.7387 Min. :0.03688 Min. :0.0000 Min. :0.03936
## 1st Qu.:0.8618 1st Qu.:0.25804 1st Qu.:0.1180 1st Qu.:0.16366
## Median :0.9018 Median :0.39634 Median :0.1866 Median :0.18484
## Mean :0.8951 Mean :0.40822 Mean :0.1653 Mean :0.18091
## 3rd Qu.:0.9287 3rd Qu.:0.53368 3rd Qu.:0.2211 3rd Qu.:0.19915
## Max. :0.9820 Max. :0.84294 Max. :0.2800 Max. :0.25112
## meanfun minfun maxfun meandom
## Min. :0.05557 Min. :0.009775 Min. :0.1031 Min. :0.007812
## 1st Qu.:0.11700 1st Qu.:0.018223 1st Qu.:0.2540 1st Qu.:0.419828
## Median :0.14052 Median :0.046110 Median :0.2712 Median :0.765795
## Mean :0.14281 Mean :0.036802 Mean :0.2588 Mean :0.829211
## 3rd Qu.:0.16958 3rd Qu.:0.047904 3rd Qu.:0.2775 3rd Qu.:1.177166
## Max. :0.23764 Max. :0.204082 Max. :0.2791 Max. :2.957682
## mindom maxdom dfrange modindx
## Min. :0.004883 Min. : 0.007812 Min. : 0.000 Min. :0.00000
## 1st Qu.:0.007812 1st Qu.: 2.070312 1st Qu.: 2.045 1st Qu.:0.09977
## Median :0.023438 Median : 4.992188 Median : 4.945 Median :0.13936
## Mean :0.052647 Mean : 5.047277 Mean : 4.995 Mean :0.17375
## 3rd Qu.:0.070312 3rd Qu.: 7.007812 3rd Qu.: 6.992 3rd Qu.:0.20918
## Max. :0.458984 Max. :21.867188 Max. :21.844 Max. :0.93237
## label
## female:1584
## male :1584
##
##
##
##
# custom normalization function
normalize <- function(x) {
return((x - min(x)) / (max(x) - min(x)))
}
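# A quick sanity check of normalize() on a toy vector (hypothetical values):
normalize(c(2, 5, 8))   # returns 0.0 0.5 1.0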
# apply normalization to every numeric column (column 21 is the class label)
rdata_norm <- as.data.frame(lapply(rdata[-21], normalize))
rdata_norm$label <- rdata$label  # re-attach the class label
summary(rdata_norm)
## meanfreq sd median Q25
## Min. :0.0000 Min. :0.0000 Min. :0.0000 Min. :0.0000
## 1st Qu.:0.5870 1st Qu.:0.2434 1st Qu.:0.6338 1st Qu.:0.4486
## Median :0.6870 Median :0.4209 Median :0.7155 Median :0.5668
## Mean :0.6684 Mean :0.4000 Mean :0.6979 Mean :0.5674
## 3rd Qu.:0.7545 3rd Qu.:0.5021 3rd Qu.:0.7978 3rd Qu.:0.7110
## Max. :1.0000 Max. :1.0000 Max. :1.0000 Max. :1.0000
## Q75 IQR skew kurt
## Min. :0.0000 Min. :0.0000 Min. :0.00000 Min. :0.000000
## 1st Qu.:0.7192 1st Qu.:0.1178 1st Qu.:0.04360 1st Qu.:0.002754
## Median :0.7927 Median :0.3354 Median :0.05943 Median :0.004780
## Mean :0.7887 Mean :0.2935 Mean :0.08670 Mean :0.026385
## 3rd Qu.:0.8707 3rd Qu.:0.4191 3rd Qu.:0.08067 3rd Qu.:0.008857
## Max. :1.0000 Max. :1.0000 Max. :1.00000 Max. :1.000000
## sp.ent sfm mode centroid
## Min. :0.0000 Min. :0.0000 Min. :0.0000 Min. :0.0000
## 1st Qu.:0.5061 1st Qu.:0.2744 1st Qu.:0.4215 1st Qu.:0.5870
## Median :0.6703 Median :0.4459 Median :0.6664 Median :0.6870
## Mean :0.6430 Mean :0.4607 Mean :0.5903 Mean :0.6684
## 3rd Qu.:0.7810 3rd Qu.:0.6163 3rd Qu.:0.7897 3rd Qu.:0.7545
## Max. :1.0000 Max. :1.0000 Max. :1.0000 Max. :1.0000
## meanfun minfun maxfun meandom
## Min. :0.0000 Min. :0.00000 Min. :0.0000 Min. :0.0000
## 1st Qu.:0.3374 1st Qu.:0.04348 1st Qu.:0.8571 1st Qu.:0.1397
## Median :0.4666 Median :0.18699 Median :0.9550 Median :0.2570
## Mean :0.4792 Mean :0.13909 Mean :0.8848 Mean :0.2785
## 3rd Qu.:0.6262 3rd Qu.:0.19623 3rd Qu.:0.9906 3rd Qu.:0.3964
## Max. :1.0000 Max. :1.00000 Max. :1.0000 Max. :1.0000
## mindom maxdom dfrange modindx
## Min. :0.000000 Min. :0.00000 Min. :0.00000 Min. :0.0000
## 1st Qu.:0.006452 1st Qu.:0.09435 1st Qu.:0.09362 1st Qu.:0.1070
## Median :0.040860 Median :0.22802 Median :0.22639 Median :0.1495
## Mean :0.105184 Mean :0.23054 Mean :0.22865 Mean :0.1864
## 3rd Qu.:0.144086 3rd Qu.:0.32023 3rd Qu.:0.32010 3rd Qu.:0.2244
## Max. :1.000000 Max. :1.00000 Max. :1.00000 Max. :1.0000
## label
## female:1584
## male :1584
##
##
##
##
boxplot(rdata_norm[-21], range = 1.5,
        pars = list(boxwex = 0.8, staplewex = 0.5, outwex = 0.5),
        names = 1:20, outline = FALSE)
# create training and test data
indx <- sample(1:nrow(rdata_norm), as.integer(0.9 * nrow(rdata_norm)))
train <- rdata_norm[indx, ]
test <- rdata_norm[-indx, ]
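# Note: the split above is random and unseeded, so it changes on every run. A
# reproducible, label-stratified alternative is sketched below (a minimal sketch;
# the seed value and the names in_train, train_alt and test_alt are assumptions,
# and createDataPartition() comes from caret):
set.seed(2023)
in_train  <- caret::createDataPartition(rdata_norm$label, p = 0.9, list = FALSE)
train_alt <- rdata_norm[in_train, ]
test_alt  <- rdata_norm[-in_train, ]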
## Step 3: Training a model on the data ----
library(caret)
## Loading required package: lattice
## Loading required package: ggplot2
TrainingParameters <- trainControl(method = "cv", number = 10,
                                   savePredictions = TRUE, classProbs = TRUE)
NNModel <- train(train[, -21], train$label, method = "nnet",
                 trControl = TrainingParameters,
                 tuneGrid = data.frame(size = 5, decay = 0))
## Loading required package: nnet
## # weights: 111
## initial value 1762.084635
## iter 10 value 361.758590
## iter 20 value 190.271091
## iter 30 value 158.969467
## iter 40 value 139.305216
## iter 50 value 123.762212
## iter 60 value 110.855737
## iter 70 value 105.669146
## iter 80 value 101.365283
## iter 90 value 96.994504
## iter 100 value 94.477773
## final value 94.477773
## stopped after 100 iterations
## # weights: 111
## initial value 1737.997856
## iter 10 value 246.004016
## iter 20 value 172.511486
## iter 30 value 126.192279
## iter 40 value 96.388010
## iter 50 value 81.873169
## iter 60 value 73.523871
## iter 70 value 66.889940
## iter 80 value 59.258418
## iter 90 value 54.040519
## iter 100 value 50.213443
## final value 50.213443
## stopped after 100 iterations
## # weights: 111
## initial value 1772.938473
## iter 10 value 789.561592
## iter 20 value 486.120551
## iter 30 value 199.455531
## iter 40 value 161.845410
## iter 50 value 154.348005
## iter 60 value 153.126971
## iter 70 value 152.281944
## iter 80 value 148.215743
## iter 90 value 146.881375
## iter 100 value 141.860226
## final value 141.860226
## stopped after 100 iterations
## # weights: 111
## initial value 1876.208603
## iter 10 value 523.035266
## iter 20 value 172.927945
## iter 30 value 138.122410
## iter 40 value 120.460152
## iter 50 value 110.917441
## iter 60 value 108.313034
## iter 70 value 99.865323
## iter 80 value 93.043681
## iter 90 value 88.821743
## iter 100 value 81.603062
## final value 81.603062
## stopped after 100 iterations
## # weights: 111
## initial value 1743.376329
## iter 10 value 307.613080
## iter 20 value 173.165123
## iter 30 value 143.239189
## iter 40 value 136.700340
## iter 50 value 130.322533
## iter 60 value 124.642993
## iter 70 value 120.714286
## iter 80 value 116.958298
## iter 90 value 111.479593
## iter 100 value 100.994355
## final value 100.994355
## stopped after 100 iterations
## # weights: 111
## initial value 1809.463109
## iter 10 value 252.330118
## iter 20 value 179.611583
## iter 30 value 137.585329
## iter 40 value 105.241282
## iter 50 value 76.084046
## iter 60 value 61.987328
## iter 70 value 57.088857
## iter 80 value 55.393523
## iter 90 value 54.453957
## iter 100 value 53.630564
## final value 53.630564
## stopped after 100 iterations
## # weights: 111
## initial value 2167.519136
## iter 10 value 1622.455433
## iter 20 value 246.818044
## iter 30 value 211.360234
## iter 40 value 201.948378
## iter 50 value 198.841922
## iter 60 value 196.889204
## iter 70 value 196.798299
## final value 196.798273
## converged
## # weights: 111
## initial value 2119.423551
## iter 10 value 391.119483
## iter 20 value 170.776951
## iter 30 value 135.088184
## iter 40 value 119.047011
## iter 50 value 111.120945
## iter 60 value 104.968068
## iter 70 value 101.147471
## iter 80 value 95.495811
## iter 90 value 92.651684
## iter 100 value 89.990484
## final value 89.990484
## stopped after 100 iterations
## # weights: 111
## initial value 1753.760333
## iter 10 value 478.730570
## iter 20 value 175.536354
## iter 30 value 136.947568
## iter 40 value 117.726153
## iter 50 value 102.500452
## iter 60 value 92.737130
## iter 70 value 87.412539
## iter 80 value 78.755533
## iter 90 value 71.809052
## iter 100 value 67.951705
## final value 67.951705
## stopped after 100 iterations
## # weights: 111
## initial value 1916.149217
## iter 10 value 438.257049
## iter 20 value 198.362714
## iter 30 value 154.295726
## iter 40 value 120.380561
## iter 50 value 101.495900
## iter 60 value 93.347862
## iter 70 value 87.287215
## iter 80 value 83.289548
## iter 90 value 81.489633
## iter 100 value 80.942492
## final value 80.942492
## stopped after 100 iterations
## # weights: 111
## initial value 2127.756360
## iter 10 value 675.008359
## iter 20 value 219.959529
## iter 30 value 182.682257
## iter 40 value 165.815283
## iter 50 value 150.912853
## iter 60 value 136.989080
## iter 70 value 126.114569
## iter 80 value 121.355538
## iter 90 value 119.995597
## iter 100 value 118.601174
## final value 118.601174
## stopped after 100 iterations
NNModel
## Neural Network
##
## 2851 samples
## 20 predictor
## 2 classes: 'female', 'male'
##
## No pre-processing
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 2566, 2565, 2566, 2566, 2566, 2566, ...
## Resampling results:
##
## Accuracy Kappa
## 0.9729873 0.9459746
##
## Tuning parameter 'size' was held constant at a value of 5
## Tuning
## parameter 'decay' was held constant at a value of 0
##
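# The call above held size = 5 and decay = 0 fixed. A small grid search is a
# possible alternative (a minimal sketch; NNGrid and NNModel2 are hypothetical
# names, the grid values are assumptions, and trace = FALSE silences nnet's
# iteration log):
NNGrid   <- expand.grid(size = c(1, 3, 5), decay = c(0, 1e-4, 1e-1))
NNModel2 <- train(train[, -21], train$label, method = "nnet",
                  trControl = TrainingParameters, tuneGrid = NNGrid,
                  trace = FALSE)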
SVModel <- train(label ~ ., data = train, method = "svmPoly",
                 trControl = TrainingParameters,
                 tuneGrid = data.frame(degree = 1, scale = 1, C = 1))
## Loading required package: kernlab
##
## Attaching package: 'kernlab'
## The following object is masked from 'package:ggplot2':
##
## alpha
SVModel
## Support Vector Machines with Polynomial Kernel
##
## 2851 samples
## 20 predictor
## 2 classes: 'female', 'male'
##
## No pre-processing
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 2567, 2565, 2566, 2566, 2566, 2566, ...
## Resampling results:
##
## Accuracy Kappa
## 0.9743945 0.9487876
##
## Tuning parameter 'degree' was held constant at a value of 1
##
## Tuning parameter 'scale' was held constant at a value of 1
##
## Tuning parameter 'C' was held constant at a value of 1
##
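# If PCA pre-processing is wanted for the SVM, the caret argument is preProcess
# (the model above was fit without any pre-processing). A minimal sketch, with
# SVModelPCA as a hypothetical name:
SVModelPCA <- train(label ~ ., data = train, method = "svmPoly",
                    preProcess = c("center", "scale", "pca"),
                    trControl = TrainingParameters,
                    tuneGrid = data.frame(degree = 1, scale = 1, C = 1))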
## Step 4: Evaluating model performance ----
NNPredictions <- predict(NNModel, test)
cm1 <- confusionMatrix(NNPredictions, test$label)
cm1
## Confusion Matrix and Statistics
##
## Reference
## Prediction female male
## female 156 8
## male 5 148
##
## Accuracy : 0.959
## 95% CI : (0.9309, 0.978)
## No Information Rate : 0.5079
## P-Value [Acc > NIR] : <2e-16
##
## Kappa : 0.9179
## Mcnemar's Test P-Value : 0.5791
##
## Sensitivity : 0.9689
## Specificity : 0.9487
## Pos Pred Value : 0.9512
## Neg Pred Value : 0.9673
## Prevalence : 0.5079
## Detection Rate : 0.4921
## Detection Prevalence : 0.5174
## Balanced Accuracy : 0.9588
##
## 'Positive' Class : female
##
SVMPredictions <- predict(SVModel, test)
cm2 <- confusionMatrix(SVMPredictions, test$label)
cm2
## Confusion Matrix and Statistics
##
## Reference
## Prediction female male
## female 155 4
## male 6 152
##
## Accuracy : 0.9685
## 95% CI : (0.9428, 0.9848)
## No Information Rate : 0.5079
## P-Value [Acc > NIR] : <2e-16
##
## Kappa : 0.9369
## Mcnemar's Test P-Value : 0.7518
##
## Sensitivity : 0.9627
## Specificity : 0.9744
## Pos Pred Value : 0.9748
## Neg Pred Value : 0.9620
## Prevalence : 0.5079
## Detection Rate : 0.4890
## Detection Prevalence : 0.5016
## Balanced Accuracy : 0.9685
##
## 'Positive' Class : female
##
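# Because classProbs = TRUE was set, class probabilities are available, so an ROC
# curve and AUC can also be computed. A minimal sketch, assuming the pROC package
# is installed (svm_probs and roc_svm are hypothetical names):
library(pROC)
svm_probs <- predict(SVModel, test, type = "prob")
roc_svm   <- roc(response = test$label, predictor = svm_probs$female)
auc(roc_svm)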
## Step 5: Improving model performance ----
library(caretEnsemble)
##
## Attaching package: 'caretEnsemble'
## The following object is masked from 'package:ggplot2':
##
## autoplot
econtrol <- trainControl(method = "cv", number = 10,
                         savePredictions = TRUE, classProbs = TRUE)
# Create models
models <- caretList(label ~ ., data = train, methodList = c("svmPoly", "nnet"),
                    trControl = econtrol)
## Warning in trControlCheck(x = trControl, y = target): x$savePredictions ==
## TRUE is depreciated. Setting to 'final' instead.
## Warning in trControlCheck(x = trControl, y = target): indexes not defined
## in trControl. Attempting to set them ourselves, so each model in the
## ensemble will have the same resampling indexes.
## # weights: 23
## initial value 1780.502770
## iter 10 value 1088.615162
## iter 20 value 361.071236
## iter 30 value 235.544600
## iter 40 value 224.453624
## iter 50 value 216.554613
## iter 60 value 206.857405
## iter 70 value 205.543855
## iter 80 value 203.089250
## iter 90 value 203.052693
## final value 203.052414
## converged
## # weights: 67
## initial value 1816.187611
## iter 10 value 626.808458
## iter 20 value 174.310399
## iter 30 value 141.120678
## iter 40 value 129.329196
## iter 50 value 122.327570
## iter 60 value 119.616712
## iter 70 value 117.509704
## iter 80 value 116.156442
## iter 90 value 115.199820
## iter 100 value 114.681799
## final value 114.681799
## stopped after 100 iterations
## # weights: 111
## initial value 2005.838645
## iter 10 value 609.186999
## iter 20 value 186.228609
## iter 30 value 124.442060
## iter 40 value 104.409217
## iter 50 value 94.548379
## iter 60 value 89.348851
## iter 70 value 85.503472
## iter 80 value 81.094660
## iter 90 value 78.812102
## iter 100 value 77.339434
## final value 77.339434
## stopped after 100 iterations
## # weights: 23
## initial value 1771.592295
## iter 10 value 781.209303
## iter 20 value 277.486248
## iter 30 value 248.452563
## iter 40 value 247.466243
## final value 247.466189
## converged
## # weights: 67
## initial value 2029.692588
## iter 10 value 1174.524019
## iter 20 value 503.097029
## iter 30 value 256.300049
## iter 40 value 224.499186
## iter 50 value 219.159983
## iter 60 value 218.261283
## iter 70 value 218.178233
## iter 80 value 218.096368
## final value 218.096298
## converged
## # weights: 111
## initial value 1820.197366
## iter 10 value 317.418037
## iter 20 value 233.588540
## iter 30 value 217.281524
## iter 40 value 214.994155
## iter 50 value 214.222051
## iter 60 value 213.552951
## iter 70 value 211.499906
## iter 80 value 209.479538
## iter 90 value 209.064892
## iter 100 value 209.014836
## final value 209.014836
## stopped after 100 iterations
## # weights: 23
## initial value 1809.777227
## iter 10 value 1092.176720
## iter 20 value 302.928321
## iter 30 value 216.781689
## iter 40 value 210.971247
## iter 50 value 205.745920
## iter 60 value 203.400789
## iter 70 value 203.214072
## iter 80 value 203.110663
## final value 203.110339
## converged
## # weights: 67
## initial value 1816.434571
## iter 10 value 1507.360358
## iter 20 value 307.398566
## iter 30 value 203.662151
## iter 40 value 180.939722
## iter 50 value 168.280292
## iter 60 value 166.447671
## iter 70 value 165.211483
## iter 80 value 163.944497
## iter 90 value 162.774533
## iter 100 value 162.285768
## final value 162.285768
## stopped after 100 iterations
## # weights: 111
## initial value 1871.208392
## iter 10 value 302.499077
## iter 20 value 169.113232
## iter 30 value 136.013568
## iter 40 value 118.701059
## iter 50 value 110.585451
## iter 60 value 102.978319
## iter 70 value 99.582078
## iter 80 value 98.237706
## iter 90 value 97.685493
## iter 100 value 97.063140
## final value 97.063140
## stopped after 100 iterations
## # weights: 23
## initial value 1904.417109
## final value 1777.911297
## converged
## # weights: 67
## initial value 1869.141152
## iter 10 value 558.203165
## iter 20 value 176.887944
## iter 30 value 154.926866
## iter 40 value 137.900884
## iter 50 value 123.970522
## iter 60 value 110.978806
## iter 70 value 102.503725
## iter 80 value 100.554596
## iter 90 value 99.787786
## iter 100 value 98.979763
## final value 98.979763
## stopped after 100 iterations
## # weights: 111
## initial value 1765.197183
## iter 10 value 673.769339
## iter 20 value 181.146350
## iter 30 value 132.238340
## iter 40 value 109.210537
## iter 50 value 93.532922
## iter 60 value 87.361007
## iter 70 value 82.520159
## iter 80 value 77.414334
## iter 90 value 72.383622
## iter 100 value 69.980128
## final value 69.980128
## stopped after 100 iterations
## # weights: 23
## initial value 1961.602137
## iter 10 value 1131.289891
## iter 20 value 293.913557
## iter 30 value 237.512365
## iter 40 value 235.806460
## iter 50 value 234.962522
## iter 50 value 234.962520
## iter 50 value 234.962520
## final value 234.962520
## converged
## # weights: 67
## initial value 1827.270195
## iter 10 value 784.008632
## iter 20 value 267.663511
## iter 30 value 230.176410
## iter 40 value 219.558321
## iter 50 value 216.279612
## iter 60 value 215.155217
## iter 70 value 212.197336
## iter 80 value 211.090627
## iter 90 value 210.713669
## iter 100 value 210.597881
## final value 210.597881
## stopped after 100 iterations
## # weights: 111
## initial value 1786.745386
## iter 10 value 913.259595
## iter 20 value 290.685336
## iter 30 value 228.971335
## iter 40 value 213.391776
## iter 50 value 206.247965
## iter 60 value 204.232089
## iter 70 value 203.644249
## iter 80 value 202.880915
## iter 90 value 201.863417
## iter 100 value 201.258296
## final value 201.258296
## stopped after 100 iterations
## # weights: 23
## initial value 1795.524588
## iter 10 value 531.573263
## iter 20 value 205.781985
## iter 30 value 193.483353
## iter 40 value 193.016222
## iter 50 value 192.753813
## final value 192.751908
## converged
## # weights: 67
## initial value 1806.317727
## iter 10 value 858.605180
## iter 20 value 251.491030
## iter 30 value 195.118864
## iter 40 value 193.115718
## iter 50 value 192.794333
## iter 60 value 192.791089
## iter 70 value 192.754747
## iter 80 value 192.469480
## iter 90 value 185.224930
## iter 100 value 166.860604
## final value 166.860604
## stopped after 100 iterations
## # weights: 111
## initial value 1917.148159
## iter 10 value 467.997554
## iter 20 value 196.288639
## iter 30 value 134.449941
## iter 40 value 126.257885
## iter 50 value 111.975161
## iter 60 value 105.819710
## iter 70 value 100.347363
## iter 80 value 78.989278
## iter 90 value 72.037976
## iter 100 value 64.424911
## final value 64.424911
## stopped after 100 iterations
## # weights: 23
## initial value 1828.514341
## iter 10 value 1318.002479
## iter 20 value 435.565969
## iter 30 value 247.174179
## iter 40 value 224.415492
## iter 50 value 222.014321
## iter 60 value 215.936862
## iter 70 value 211.857645
## iter 80 value 211.423919
## iter 90 value 211.413529
## final value 211.413502
## converged
## # weights: 67
## initial value 1780.059016
## iter 10 value 477.220205
## iter 20 value 218.559653
## iter 30 value 186.403101
## iter 40 value 175.666853
## iter 50 value 171.551196
## iter 60 value 167.144026
## iter 70 value 164.356736
## iter 80 value 163.847308
## iter 90 value 163.516784
## iter 100 value 162.078884
## final value 162.078884
## stopped after 100 iterations
## # weights: 111
## initial value 1782.562272
## iter 10 value 265.872632
## iter 20 value 195.611168
## iter 30 value 170.402086
## iter 40 value 151.807111
## iter 50 value 131.518072
## iter 60 value 113.320470
## iter 70 value 100.340720
## iter 80 value 85.543194
## iter 90 value 79.496277
## iter 100 value 71.883559
## final value 71.883559
## stopped after 100 iterations
## # weights: 23
## initial value 1954.545619
## iter 10 value 1305.200351
## iter 20 value 402.011498
## iter 30 value 266.796086
## iter 40 value 256.790170
## final value 256.788428
## converged
## # weights: 67
## initial value 1997.772007
## iter 10 value 970.827957
## iter 20 value 320.662422
## iter 30 value 282.157379
## iter 40 value 251.116543
## iter 50 value 240.085902
## iter 60 value 237.676254
## iter 70 value 233.521405
## iter 80 value 229.200429
## iter 90 value 227.908057
## iter 100 value 227.880973
## final value 227.880973
## stopped after 100 iterations
## # weights: 111
## initial value 1981.364440
## iter 10 value 576.244567
## iter 20 value 288.738048
## iter 30 value 245.012730
## iter 40 value 233.352753
## iter 50 value 226.254905
## iter 60 value 220.850847
## iter 70 value 219.117737
## iter 80 value 218.017933
## iter 90 value 217.766885
## iter 100 value 217.557531
## final value 217.557531
## stopped after 100 iterations
## # weights: 23
## initial value 1834.286988
## iter 10 value 974.293281
## iter 20 value 701.772726
## iter 30 value 303.328362
## iter 40 value 256.172220
## iter 50 value 252.626659
## iter 60 value 249.494074
## iter 70 value 248.775071
## iter 80 value 247.154344
## iter 90 value 243.219786
## iter 100 value 237.807277
## final value 237.807277
## stopped after 100 iterations
## # weights: 67
## initial value 1958.863740
## iter 10 value 746.869771
## iter 20 value 211.887335
## iter 30 value 197.418500
## iter 40 value 187.357277
## iter 50 value 179.362757
## iter 60 value 175.476408
## iter 70 value 175.001347
## iter 80 value 174.540278
## iter 90 value 169.046025
## iter 100 value 147.345298
## final value 147.345298
## stopped after 100 iterations
## # weights: 111
## initial value 2053.158926
## iter 10 value 509.605776
## iter 20 value 191.719874
## iter 30 value 135.554048
## iter 40 value 109.760267
## iter 50 value 96.826509
## iter 60 value 92.816943
## iter 70 value 90.479259
## iter 80 value 87.572239
## iter 90 value 82.221002
## iter 100 value 79.457432
## final value 79.457432
## stopped after 100 iterations
## # weights: 23
## initial value 1989.961399
## iter 10 value 776.358773
## iter 20 value 247.075551
## iter 30 value 216.809383
## iter 40 value 208.229419
## iter 50 value 205.481870
## iter 60 value 205.194693
## iter 70 value 205.162443
## iter 80 value 205.139358
## final value 205.138749
## converged
## # weights: 67
## initial value 1898.794044
## iter 10 value 619.547987
## iter 20 value 229.417966
## iter 30 value 151.096169
## iter 40 value 128.472896
## iter 50 value 110.077268
## iter 60 value 101.490888
## iter 70 value 95.975833
## iter 80 value 92.150890
## iter 90 value 87.392729
## iter 100 value 84.214702
## final value 84.214702
## stopped after 100 iterations
## # weights: 111
## initial value 1750.057978
## iter 10 value 360.253305
## iter 20 value 191.011686
## iter 30 value 182.101636
## iter 40 value 159.226983
## iter 50 value 151.125299
## iter 60 value 145.599594
## iter 70 value 139.910011
## iter 80 value 133.721853
## iter 90 value 131.479503
## iter 100 value 125.721496
## final value 125.721496
## stopped after 100 iterations
## # weights: 23
## initial value 1891.033686
## iter 10 value 1018.628525
## iter 20 value 532.357819
## iter 30 value 268.064891
## iter 40 value 248.633300
## iter 50 value 247.981313
## final value 247.981305
## converged
## # weights: 67
## initial value 1793.158733
## iter 10 value 622.711293
## iter 20 value 317.507326
## iter 30 value 250.559314
## iter 40 value 238.631390
## iter 50 value 237.951225
## iter 60 value 231.748163
## iter 70 value 228.925038
## iter 80 value 227.771093
## iter 90 value 225.228021
## iter 100 value 224.272181
## final value 224.272181
## stopped after 100 iterations
## # weights: 111
## initial value 1871.083386
## iter 10 value 586.050924
## iter 20 value 284.748151
## iter 30 value 240.725141
## iter 40 value 223.883515
## iter 50 value 216.723295
## iter 60 value 215.975641
## iter 70 value 214.065346
## iter 80 value 212.426357
## iter 90 value 212.059997
## iter 100 value 212.019066
## final value 212.019066
## stopped after 100 iterations
## # weights: 23
## initial value 1842.006642
## iter 10 value 1198.580260
## iter 20 value 256.784454
## iter 30 value 205.893209
## iter 40 value 205.216771
## iter 50 value 205.214347
## final value 205.213440
## converged
## # weights: 67
## initial value 1789.662068
## iter 10 value 393.504241
## iter 20 value 217.372793
## iter 30 value 207.178524
## iter 40 value 206.218453
## iter 50 value 200.331035
## iter 60 value 190.453079
## iter 70 value 183.124139
## iter 80 value 177.549545
## iter 90 value 173.276152
## iter 100 value 170.014436
## final value 170.014436
## stopped after 100 iterations
## # weights: 111
## initial value 1805.713204
## iter 10 value 308.822762
## iter 20 value 182.507807
## iter 30 value 142.316900
## iter 40 value 119.694989
## iter 50 value 114.401615
## iter 60 value 111.851969
## iter 70 value 110.779094
## iter 80 value 110.100195
## iter 90 value 109.909069
## iter 100 value 109.632223
## final value 109.632223
## stopped after 100 iterations
## # weights: 23
## initial value 1928.037756
## iter 10 value 1141.247125
## iter 20 value 469.357517
## iter 30 value 265.978203
## iter 40 value 202.186834
## iter 50 value 201.130907
## iter 60 value 200.358684
## iter 70 value 200.125808
## iter 80 value 200.115756
## iter 80 value 200.115755
## iter 80 value 200.115755
## final value 200.115755
## converged
## # weights: 67
## initial value 1914.203124
## iter 10 value 636.067206
## iter 20 value 253.202567
## iter 30 value 180.779992
## iter 40 value 170.397292
## iter 50 value 153.190300
## iter 60 value 146.682795
## iter 70 value 144.641485
## iter 80 value 143.780867
## iter 90 value 142.421105
## iter 100 value 140.310651
## final value 140.310651
## stopped after 100 iterations
## # weights: 111
## initial value 1766.156363
## iter 10 value 245.947876
## iter 20 value 154.006269
## iter 30 value 109.594034
## iter 40 value 93.051710
## iter 50 value 83.133231
## iter 60 value 76.544644
## iter 70 value 71.754423
## iter 80 value 62.603415
## iter 90 value 59.554735
## iter 100 value 55.395407
## final value 55.395407
## stopped after 100 iterations
## # weights: 23
## initial value 1844.111466
## iter 10 value 684.058289
## iter 20 value 282.035782
## iter 30 value 249.417083
## iter 40 value 247.806979
## iter 50 value 246.446397
## iter 60 value 246.445851
## final value 246.445841
## converged
## # weights: 67
## initial value 1790.876881
## iter 10 value 834.505056
## iter 20 value 250.328501
## iter 30 value 234.829990
## iter 40 value 228.598464
## iter 50 value 224.101700
## iter 60 value 221.839272
## iter 70 value 220.284432
## iter 80 value 217.089149
## iter 90 value 216.183920
## iter 100 value 216.004109
## final value 216.004109
## stopped after 100 iterations
## # weights: 111
## initial value 2017.176919
## iter 10 value 766.838475
## iter 20 value 380.624007
## iter 30 value 267.183888
## iter 40 value 229.201168
## iter 50 value 217.321735
## iter 60 value 209.958411
## iter 70 value 209.399801
## iter 80 value 209.190311
## iter 90 value 209.044805
## iter 100 value 208.853485
## final value 208.853485
## stopped after 100 iterations
## # weights: 23
## initial value 1774.653164
## iter 10 value 352.351367
## iter 20 value 231.594755
## iter 30 value 216.035023
## iter 40 value 203.976231
## iter 50 value 200.801138
## iter 60 value 200.203192
## iter 70 value 200.184922
## iter 80 value 200.169431
## final value 200.169285
## converged
## # weights: 67
## initial value 1783.509492
## iter 10 value 261.460291
## iter 20 value 194.244784
## iter 30 value 177.004376
## iter 40 value 169.099304
## iter 50 value 162.792325
## iter 60 value 160.689421
## iter 70 value 159.946439
## iter 80 value 159.709721
## iter 90 value 159.507632
## iter 100 value 159.412811
## final value 159.412811
## stopped after 100 iterations
## # weights: 111
## initial value 2218.251784
## iter 10 value 1033.968468
## iter 20 value 219.350087
## iter 30 value 170.427060
## iter 40 value 130.541253
## iter 50 value 96.902252
## iter 60 value 82.668712
## iter 70 value 75.233007
## iter 80 value 70.076214
## iter 90 value 66.526063
## iter 100 value 64.922841
## final value 64.922841
## stopped after 100 iterations
## # weights: 23
## initial value 1925.618035
## iter 10 value 899.333012
## iter 20 value 225.090879
## iter 30 value 211.444286
## iter 40 value 208.823614
## iter 50 value 208.270747
## iter 60 value 208.235943
## iter 60 value 208.235943
## final value 208.235943
## converged
## # weights: 67
## initial value 2059.206909
## iter 10 value 605.931945
## iter 20 value 255.252239
## iter 30 value 194.228476
## iter 40 value 182.917201
## iter 50 value 177.302375
## iter 60 value 170.938073
## iter 70 value 166.365611
## iter 80 value 161.610789
## iter 90 value 157.946524
## iter 100 value 156.788016
## final value 156.788016
## stopped after 100 iterations
## # weights: 111
## initial value 1786.282105
## iter 10 value 690.020039
## iter 20 value 230.523353
## iter 30 value 188.847731
## iter 40 value 176.954006
## iter 50 value 169.904933
## iter 60 value 166.484330
## iter 70 value 165.886505
## iter 80 value 164.900760
## iter 90 value 164.019835
## iter 100 value 163.588939
## final value 163.588939
## stopped after 100 iterations
## # weights: 23
## initial value 1793.911210
## iter 10 value 1107.325342
## iter 20 value 414.140566
## iter 30 value 252.310048
## iter 40 value 250.856550
## iter 50 value 249.688365
## final value 249.687735
## converged
## # weights: 67
## initial value 1758.579031
## iter 10 value 359.977237
## iter 20 value 284.059982
## iter 30 value 249.927936
## iter 40 value 242.692289
## iter 50 value 234.474710
## iter 60 value 224.330470
## iter 70 value 222.535330
## iter 80 value 222.346894
## final value 222.345836
## converged
## # weights: 111
## initial value 1928.224814
## iter 10 value 350.839972
## iter 20 value 254.837267
## iter 30 value 222.995344
## iter 40 value 218.030145
## iter 50 value 214.013675
## iter 60 value 212.978498
## iter 70 value 212.179599
## iter 80 value 211.672985
## iter 90 value 211.465288
## iter 100 value 211.446694
## final value 211.446694
## stopped after 100 iterations
## # weights: 23
## initial value 1822.152044
## iter 10 value 626.299072
## iter 20 value 227.981118
## iter 30 value 219.858726
## iter 40 value 211.326840
## iter 50 value 208.811192
## iter 60 value 208.416556
## iter 70 value 208.330786
## iter 80 value 208.292581
## final value 208.292553
## converged
## # weights: 67
## initial value 1918.768871
## iter 10 value 487.603652
## iter 20 value 210.666353
## iter 30 value 194.692325
## iter 40 value 183.824530
## iter 50 value 177.450422
## iter 60 value 164.275260
## iter 70 value 151.872016
## iter 80 value 144.779452
## iter 90 value 143.121470
## iter 100 value 141.670089
## final value 141.670089
## stopped after 100 iterations
## # weights: 111
## initial value 1779.119880
## iter 10 value 556.063161
## iter 20 value 191.110904
## iter 30 value 141.255433
## iter 40 value 121.851886
## iter 50 value 112.187513
## iter 60 value 104.363968
## iter 70 value 96.263204
## iter 80 value 90.285686
## iter 90 value 84.725166
## iter 100 value 79.981685
## final value 79.981685
## stopped after 100 iterations
## # weights: 23
## initial value 1918.010411
## final value 1778.612548
## converged
## # weights: 67
## initial value 1870.523460
## iter 10 value 228.705928
## iter 20 value 193.665608
## iter 30 value 193.358744
## iter 40 value 193.352591
## final value 193.341686
## converged
## # weights: 111
## initial value 1785.353703
## iter 10 value 324.779153
## iter 20 value 171.196883
## iter 30 value 149.173272
## iter 40 value 119.991149
## iter 50 value 103.569506
## iter 60 value 93.976656
## iter 70 value 87.631400
## iter 80 value 82.978400
## iter 90 value 81.794060
## iter 100 value 80.832778
## final value 80.832778
## stopped after 100 iterations
## # weights: 23
## initial value 1754.869909
## iter 10 value 455.266753
## iter 20 value 248.814318
## iter 30 value 238.511922
## final value 238.479970
## converged
## # weights: 67
## initial value 2024.245060
## iter 10 value 1030.250388
## iter 20 value 695.315728
## iter 30 value 277.204142
## iter 40 value 235.867137
## iter 50 value 229.103862
## iter 60 value 221.940434
## iter 70 value 220.060623
## iter 80 value 218.807411
## iter 90 value 218.593287
## iter 100 value 218.408455
## final value 218.408455
## stopped after 100 iterations
## # weights: 111
## initial value 1817.915425
## iter 10 value 917.159808
## iter 20 value 378.861279
## iter 30 value 268.754728
## iter 40 value 250.627399
## iter 50 value 239.587872
## iter 60 value 225.127615
## iter 70 value 212.726527
## iter 80 value 210.054762
## iter 90 value 208.307973
## iter 100 value 205.252213
## final value 205.252213
## stopped after 100 iterations
## # weights: 23
## initial value 1803.301385
## iter 10 value 1551.585704
## iter 20 value 278.147176
## iter 30 value 219.127612
## iter 40 value 195.950206
## iter 50 value 194.312865
## iter 60 value 194.301426
## iter 70 value 194.285771
## iter 80 value 194.074226
## iter 90 value 193.806458
## iter 100 value 193.595260
## final value 193.595260
## stopped after 100 iterations
## # weights: 67
## initial value 1800.879404
## iter 10 value 775.072998
## iter 20 value 196.952057
## iter 30 value 175.032362
## iter 40 value 162.989290
## iter 50 value 154.084627
## iter 60 value 148.031541
## iter 70 value 143.523023
## iter 80 value 140.230894
## iter 90 value 136.412555
## iter 100 value 133.559130
## final value 133.559130
## stopped after 100 iterations
## # weights: 111
## initial value 1939.436506
## iter 10 value 398.548573
## iter 20 value 168.802103
## iter 30 value 132.645936
## iter 40 value 123.205723
## iter 50 value 119.721569
## iter 60 value 115.372416
## iter 70 value 112.296498
## iter 80 value 109.785631
## iter 90 value 106.280493
## iter 100 value 105.370992
## final value 105.370992
## stopped after 100 iterations
## # weights: 23
## initial value 1853.157982
## iter 10 value 793.221602
## iter 20 value 328.450373
## iter 30 value 270.598660
## iter 40 value 256.139580
## iter 50 value 252.526978
## iter 60 value 251.601379
## iter 70 value 248.039946
## iter 80 value 241.978508
## iter 90 value 241.707142
## iter 100 value 241.596627
## final value 241.596627
## stopped after 100 iterations
## # weights: 67
## initial value 1912.269442
## iter 10 value 1679.024192
## iter 20 value 574.812530
## iter 30 value 467.261229
## iter 40 value 275.960584
## iter 50 value 206.209260
## iter 60 value 193.481074
## iter 70 value 190.055255
## iter 80 value 189.000427
## iter 90 value 188.167527
## iter 100 value 186.874501
## final value 186.874501
## stopped after 100 iterations
## # weights: 111
## initial value 1748.937377
## iter 10 value 291.892139
## iter 20 value 192.121402
## iter 30 value 151.025402
## iter 40 value 133.350637
## iter 50 value 112.847689
## iter 60 value 101.308722
## iter 70 value 95.109154
## iter 80 value 93.303947
## iter 90 value 90.223067
## iter 100 value 89.241647
## final value 89.241647
## stopped after 100 iterations
## # weights: 23
## initial value 2241.237336
## iter 10 value 681.533267
## iter 20 value 266.656175
## iter 30 value 255.314094
## iter 40 value 246.953713
## iter 50 value 246.152432
## final value 246.152389
## converged
## # weights: 67
## initial value 2162.326452
## iter 10 value 1084.066310
## iter 20 value 303.895365
## iter 30 value 247.330743
## iter 40 value 230.448324
## iter 50 value 226.070222
## iter 60 value 224.267335
## iter 70 value 218.651549
## iter 80 value 217.987312
## iter 90 value 217.852035
## final value 217.851109
## converged
## # weights: 111
## initial value 1813.138120
## iter 10 value 343.560720
## iter 20 value 255.927990
## iter 30 value 234.304154
## iter 40 value 223.920956
## iter 50 value 217.342143
## iter 60 value 212.893216
## iter 70 value 210.282662
## iter 80 value 209.803658
## iter 90 value 209.485669
## iter 100 value 209.425680
## final value 209.425680
## stopped after 100 iterations
## # weights: 23
## initial value 1785.723203
## iter 10 value 480.378434
## iter 20 value 223.827394
## iter 30 value 215.711891
## iter 40 value 206.284852
## iter 50 value 205.146330
## iter 60 value 204.781725
## iter 70 value 204.711738
## iter 80 value 204.667314
## final value 204.665151
## converged
## # weights: 67
## initial value 1802.301951
## iter 10 value 739.897614
## iter 20 value 242.606873
## iter 30 value 162.676820
## iter 40 value 136.003469
## iter 50 value 127.058509
## iter 60 value 123.985233
## iter 70 value 122.717353
## iter 80 value 121.972413
## iter 90 value 121.250009
## iter 100 value 120.888539
## final value 120.888539
## stopped after 100 iterations
## # weights: 111
## initial value 1784.328593
## iter 10 value 686.621154
## iter 20 value 228.912725
## iter 30 value 162.098714
## iter 40 value 132.749641
## iter 50 value 101.956254
## iter 60 value 88.072954
## iter 70 value 83.640489
## iter 80 value 80.613307
## iter 90 value 77.485065
## iter 100 value 74.623539
## final value 74.623539
## stopped after 100 iterations
## # weights: 23
## initial value 1804.927927
## iter 10 value 656.836835
## iter 20 value 202.699026
## iter 30 value 199.831802
## iter 40 value 199.503599
## iter 50 value 198.932169
## iter 60 value 198.817961
## iter 70 value 198.799921
## iter 80 value 198.795984
## final value 198.795923
## converged
## # weights: 67
## initial value 1888.831829
## iter 10 value 668.232436
## iter 20 value 292.166996
## iter 30 value 203.784626
## iter 40 value 198.838138
## iter 50 value 198.818700
## iter 60 value 198.795711
## final value 198.795678
## converged
## # weights: 111
## initial value 1834.647516
## iter 10 value 551.234316
## iter 20 value 196.486938
## iter 30 value 188.750030
## iter 40 value 182.311213
## iter 50 value 177.698050
## iter 60 value 173.979332
## iter 70 value 170.923164
## iter 80 value 169.787122
## iter 90 value 166.915997
## iter 100 value 163.893241
## final value 163.893241
## stopped after 100 iterations
## # weights: 23
## initial value 1764.338088
## iter 10 value 405.279468
## iter 20 value 249.339382
## iter 30 value 243.347003
## iter 40 value 242.003659
## final value 242.003653
## converged
## # weights: 67
## initial value 1868.920092
## iter 10 value 746.702346
## iter 20 value 303.021772
## iter 30 value 247.490101
## iter 40 value 231.350756
## iter 50 value 227.177425
## iter 60 value 226.886473
## iter 70 value 226.708262
## iter 80 value 225.799534
## iter 90 value 220.504671
## iter 100 value 219.066947
## final value 219.066947
## stopped after 100 iterations
## # weights: 111
## initial value 2025.698809
## iter 10 value 772.421454
## iter 20 value 332.207698
## iter 30 value 249.856784
## iter 40 value 231.922071
## iter 50 value 218.201483
## iter 60 value 214.374753
## iter 70 value 212.369529
## iter 80 value 210.765089
## iter 90 value 209.519377
## iter 100 value 208.850880
## final value 208.850880
## stopped after 100 iterations
## # weights: 23
## initial value 1813.933468
## iter 10 value 678.692496
## iter 20 value 214.487115
## iter 30 value 206.711562
## iter 40 value 202.876054
## iter 50 value 201.470533
## iter 60 value 200.226578
## iter 70 value 199.044699
## iter 80 value 198.860745
## final value 198.858870
## converged
## # weights: 67
## initial value 1819.754625
## iter 10 value 350.143060
## iter 20 value 178.038940
## iter 30 value 141.305422
## iter 40 value 134.369296
## iter 50 value 127.688311
## iter 60 value 118.941355
## iter 70 value 115.150961
## iter 80 value 113.783617
## iter 90 value 111.486580
## iter 100 value 110.633311
## final value 110.633311
## stopped after 100 iterations
## # weights: 111
## initial value 2201.482476
## iter 10 value 1635.535022
## iter 20 value 331.282792
## iter 30 value 208.087068
## iter 40 value 200.795359
## iter 50 value 180.423895
## iter 60 value 166.811272
## iter 70 value 162.254482
## iter 80 value 160.546238
## iter 90 value 157.908095
## iter 100 value 155.635291
## final value 155.635291
## stopped after 100 iterations
## # weights: 23
## initial value 2045.966647
## iter 10 value 353.273549
## iter 20 value 216.150143
## iter 30 value 209.390767
## iter 40 value 206.246195
## iter 50 value 206.238592
## iter 60 value 206.139003
## iter 70 value 206.103046
## final value 206.102976
## converged
## # weights: 67
## initial value 1802.441664
## iter 10 value 747.660799
## iter 20 value 176.201301
## iter 30 value 143.977149
## iter 40 value 124.168551
## iter 50 value 115.418386
## iter 60 value 106.393461
## iter 70 value 102.093619
## iter 80 value 99.106574
## iter 90 value 97.127262
## iter 100 value 95.597963
## final value 95.597963
## stopped after 100 iterations
## # weights: 111
## initial value 1910.359904
## iter 10 value 668.550422
## iter 20 value 265.310804
## iter 30 value 179.062894
## iter 40 value 147.183697
## iter 50 value 120.770910
## iter 60 value 107.073819
## iter 70 value 102.315772
## iter 80 value 98.109622
## iter 90 value 94.654810
## iter 100 value 91.261244
## final value 91.261244
## stopped after 100 iterations
## # weights: 23
## initial value 1906.008429
## iter 10 value 1051.499706
## iter 20 value 473.771457
## iter 30 value 257.326443
## iter 40 value 247.873139
## final value 247.696240
## converged
## # weights: 67
## initial value 2016.461011
## iter 10 value 603.335001
## iter 20 value 282.848697
## iter 30 value 237.169945
## iter 40 value 227.510601
## iter 50 value 226.064674
## iter 60 value 225.770783
## iter 70 value 225.650665
## final value 225.650597
## converged
## # weights: 111
## initial value 1785.268541
## iter 10 value 480.631908
## iter 20 value 273.516746
## iter 30 value 246.292141
## iter 40 value 235.552541
## iter 50 value 227.550164
## iter 60 value 225.022507
## iter 70 value 224.003771
## iter 80 value 220.976355
## iter 90 value 216.230374
## iter 100 value 211.975472
## final value 211.975472
## stopped after 100 iterations
## # weights: 23
## initial value 1884.859719
## iter 10 value 1690.055991
## iter 20 value 844.384098
## iter 30 value 647.026254
## iter 40 value 354.787243
## iter 50 value 304.152586
## iter 60 value 240.034966
## iter 70 value 230.078748
## iter 80 value 227.315533
## iter 90 value 218.263173
## iter 100 value 208.764959
## final value 208.764959
## stopped after 100 iterations
## # weights: 67
## initial value 1830.925840
## iter 10 value 700.140773
## iter 20 value 329.494769
## iter 30 value 180.602862
## iter 40 value 134.735313
## iter 50 value 120.287230
## iter 60 value 117.102379
## iter 70 value 115.910709
## iter 80 value 115.222412
## iter 90 value 114.659007
## iter 100 value 113.948121
## final value 113.948121
## stopped after 100 iterations
## # weights: 111
## initial value 1718.297535
## iter 10 value 455.747493
## iter 20 value 171.087501
## iter 30 value 138.255385
## iter 40 value 98.695136
## iter 50 value 74.674383
## iter 60 value 63.667139
## iter 70 value 57.870142
## iter 80 value 55.931935
## iter 90 value 53.382731
## iter 100 value 50.152575
## final value 50.152575
## stopped after 100 iterations
## # weights: 111
## initial value 1986.967734
## iter 10 value 588.107606
## iter 20 value 342.684759
## iter 30 value 263.214914
## iter 40 value 249.105136
## iter 50 value 241.224875
## iter 60 value 232.773878
## iter 70 value 228.051181
## iter 80 value 227.291035
## iter 90 value 227.199732
## iter 100 value 227.160237
## final value 227.160237
## stopped after 100 iterations
models
## $svmPoly
## Support Vector Machines with Polynomial Kernel
##
## 2851 samples
## 20 predictor
## 2 classes: 'female', 'male'
##
## No pre-processing
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 2566, 2565, 2566, 2566, 2565, 2566, ...
## Resampling results across tuning parameters:
##
## degree scale C Accuracy Kappa
## 1 0.001 0.25 0.8989754 0.7978942
## 1 0.001 0.50 0.9172210 0.8344087
## 1 0.001 1.00 0.9579106 0.9158175
## 1 0.010 0.25 0.9705422 0.9410839
## 1 0.010 0.50 0.9722917 0.9445821
## 1 0.010 1.00 0.9722929 0.9445844
## 1 0.100 0.25 0.9722917 0.9445810
## 1 0.100 0.50 0.9743957 0.9487894
## 1 0.100 1.00 0.9736927 0.9473831
## 2 0.001 0.25 0.9189742 0.8379163
## 2 0.001 0.50 0.9600171 0.9200317
## 2 0.001 1.00 0.9698417 0.9396832
## 2 0.010 0.25 0.9722917 0.9445822
## 2 0.010 0.50 0.9726413 0.9452815
## 2 0.010 1.00 0.9740436 0.9480860
## 2 0.100 0.25 0.9786087 0.9572169
## 2 0.100 0.50 0.9782566 0.9565129
## 2 0.100 1.00 0.9782566 0.9565127
## 3 0.001 0.25 0.9494920 0.8989776
## 3 0.001 0.50 0.9687866 0.9375734
## 3 0.001 1.00 0.9722941 0.9445875
## 3 0.010 0.25 0.9722904 0.9445802
## 3 0.010 0.50 0.9729934 0.9459862
## 3 0.010 1.00 0.9758005 0.9516000
## 3 0.100 0.25 0.9761489 0.9522972
## 3 0.100 0.50 0.9772015 0.9544023
## 3 0.100 1.00 0.9803594 0.9607182
##
## Accuracy was used to select the optimal model using the largest value.
## The final values used for the model were degree = 3, scale = 0.1 and C = 1.
##
## $nnet
## Neural Network
##
## 2851 samples
## 20 predictor
## 2 classes: 'female', 'male'
##
## No pre-processing
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 2566, 2565, 2566, 2566, 2565, 2566, ...
## Resampling results across tuning parameters:
##
## size decay Accuracy Kappa
## 1 0e+00 0.8812268 0.7621029
## 1 1e-04 0.9743957 0.9487914
## 1 1e-01 0.9754471 0.9508937
## 3 0e+00 0.9729959 0.9459919
## 3 1e-04 0.9719359 0.9438704
## 3 1e-01 0.9757992 0.9515980
## 5 0e+00 0.9705398 0.9410795
## 5 1e-04 0.9740436 0.9480863
## 5 1e-01 0.9789572 0.9579133
##
## Accuracy was used to select the optimal model using the largest value.
## The final values used for the model were size = 5 and decay = 0.1.
##
## attr(,"class")
## [1] "caretList"
# Let's see the results and note the best-performing model
results <- resamples(models)
results$values
## Resample svmPoly~Accuracy svmPoly~Kappa nnet~Accuracy nnet~Kappa
## 1 Fold01 0.9754386 0.9508754 0.9824561 0.9649127
## 2 Fold02 0.9790210 0.9580420 0.9790210 0.9580420
## 3 Fold03 0.9824561 0.9649093 0.9859649 0.9719267
## 4 Fold04 0.9754386 0.9508802 0.9754386 0.9508754
## 5 Fold05 0.9895105 0.9790210 0.9825175 0.9650350
## 6 Fold06 0.9894737 0.9789476 0.9824561 0.9649110
## 7 Fold07 0.9649123 0.9298202 0.9649123 0.9298237
## 8 Fold08 0.9929577 0.9859155 0.9894366 0.9788732
## 9 Fold09 0.9719298 0.9438617 0.9684211 0.9368429
## 10 Fold10 0.9824561 0.9649093 0.9789474 0.9578901
summary(results)
##
## Call:
## summary.resamples(object = results)
##
## Models: svmPoly, nnet
## Number of resamples: 10
##
## Accuracy
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## svmPoly 0.9649 0.9754 0.9807 0.9804 0.9877 0.9930 0
## nnet 0.9649 0.9763 0.9807 0.9790 0.9825 0.9894 0
##
## Kappa
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## svmPoly 0.9298 0.9509 0.9615 0.9607 0.9754 0.9859 0
## nnet 0.9298 0.9526 0.9615 0.9579 0.9650 0.9789 0
NN          <- cm1$overall[1:2]
SVM         <- cm2$overall[1:2]
improvedNN  <- c(mean(results$values[, 4]), mean(results$values[, 5]))  # nnet CV Accuracy/Kappa
improvedSVM <- c(mean(results$values[, 2]), mean(results$values[, 3]))  # svmPoly CV Accuracy/Kappa
comparison  <- rbind(NN, SVM, improvedNN, improvedSVM)
comparison
## Accuracy Kappa
## NN 0.9589905 0.9179362
## SVM 0.9684543 0.9369054
## improvedNN 0.9789572 0.9579133
## improvedSVM 0.9803594 0.9607182
# After comparing the Accuracy and Kappa values, we choose the ensemble's SVM model (improvedSVM).
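# In addition to tuning the individual learners with caretList(), caretEnsemble can
# stack them into one meta-model. A minimal sketch, assuming a simple GLM stack
# (stack_ctrl and stack_model are hypothetical names; not run here):
stack_ctrl  <- trainControl(method = "cv", number = 10,
                            savePredictions = "final", classProbs = TRUE)
stack_model <- caretStack(models, method = "glm", trControl = stack_ctrl)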
# Final Model
pre <- predict(models$svmPoly, rdata_norm)
confusionMatrix(pre, rdata_norm$label)
## Confusion Matrix and Statistics
##
## Reference
## Prediction female male
## female 1569 10
## male 15 1574
##
## Accuracy : 0.9921
## 95% CI : (0.9884, 0.9949)
## No Information Rate : 0.5
## P-Value [Acc > NIR] : <2e-16
##
## Kappa : 0.9842
## Mcnemar's Test P-Value : 0.4237
##
## Sensitivity : 0.9905
## Specificity : 0.9937
## Pos Pred Value : 0.9937
## Neg Pred Value : 0.9906
## Prevalence : 0.5000
## Detection Rate : 0.4953
## Detection Prevalence : 0.4984
## Balanced Accuracy : 0.9921
##
## 'Positive' Class : female
##
# Applying this model to the whole dataset gives an accuracy of 0.9921 and a kappa of 0.9842. Note that the full dataset includes the rows the model was trained on, so this estimate is optimistic.
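# Restricting the check to the held-out test rows avoids that optimism (a minimal
# sketch; it reuses the test set created in Step 2):
confusionMatrix(predict(models$svmPoly, test), test$label)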
# Pick a couple of examples from the test data
examples <- test[sample(1:nrow(test), 2), ]
examples
## meanfreq sd median Q25 Q75 IQR skew
## 255 0.4331682 0.7117662 0.4783678 0.2623167 0.7208845 0.5449509 0.9945649
## 1837 0.6082331 0.3027127 0.6405013 0.6297574 0.6292649 0.0740336 0.0880202
## kurt sp.ent sfm mode centroid meanfun
## 255 0.97073961 0.4712677 0.6116976 0.0000000 0.4331682 0.2277502
## 1837 0.01024507 0.5577099 0.4220036 0.5566188 0.6082331 0.6241873
## minfun maxfun meandom mindom maxdom dfrange
## 255 0.0573314 0.9292854 0.0000000 0.006451613 0.0000000 0.0000000
## 1837 0.3077100 0.9815258 0.1180554 0.006451613 0.2047891 0.2049356
## modindx label
## 255 0.00000000 male
## 1837 0.08873415 female
predict(models$svmPoly,examples)
## [1] male female
## Levels: female male
confusionMatrix(predict(models$svmPoly,examples), examples$label)
## Confusion Matrix and Statistics
##
## Reference
## Prediction female male
## female 1 0
## male 0 1
##
## Accuracy : 1
## 95% CI : (0.1581, 1)
## No Information Rate : 0.5
## P-Value [Acc > NIR] : 0.25
##
## Kappa : 1
## Mcnemar's Test P-Value : NA
##
## Sensitivity : 1.0
## Specificity : 1.0
## Pos Pred Value : 1.0
## Neg Pred Value : 1.0
## Prevalence : 0.5
## Detection Rate : 0.5
## Detection Prevalence : 0.5
## Balanced Accuracy : 1.0
##
## 'Positive' Class : female
##
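# For a closer look, the class probabilities for the same two rows can be inspected
# (a minimal sketch; type = "prob" works because classProbs = TRUE was used):
predict(models$svmPoly, examples, type = "prob")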
# Rows 255 and 1837 were randomly selected from the test data. The predictions for these two examples are male and female respectively, both of which match their labels.
# What I learnt: SVM and ANN are both effective algorithms for classifying numeric data. Cross-validation and ensemble methods are good ways to improve model performance.