A toy example of the different SVM methods available in {caret} via {kernlab}, using the iris dataset with a multiclass response.
Note that the Accuracy and Kappa values reported at the end of each model are in-sample. Increasing the number of folds in trainControl(number = X) should decrease these in-sample metrics somewhat while giving estimates that align more closely with out-of-sample performance. Since this is a toy example with only 150 observations, no test set was created; a sketch of what a holdout split would look like follows the structure check below.
# Load packages
library(caret)
library(pander)
# Load dataset
iris = datasets::iris
# Check structure
str(iris)
## 'data.frame': 150 obs. of 5 variables:
## $ Sepal.Length: num 5.1 4.9 4.7 4.6 5 5.4 4.6 5 4.4 4.9 ...
## $ Sepal.Width : num 3.5 3 3.2 3.1 3.6 3.9 3.4 3.4 2.9 3.1 ...
## $ Petal.Length: num 1.4 1.4 1.3 1.5 1.4 1.7 1.4 1.5 1.4 1.5 ...
## $ Petal.Width : num 0.2 0.2 0.2 0.2 0.2 0.4 0.3 0.2 0.2 0.1 ...
## $ Species : Factor w/ 3 levels "setosa","versicolor",..: 1 1 1 1 1 1 1 1 1 1 ...
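For reference only (not used in this example), a holdout split could be made before training; a minimal sketch with caret::createDataPartition(), assuming an arbitrary 80/20 split:
# Not run: create an out-of-sample holdout
set.seed(123)
in.train = createDataPartition(iris$Species, p = 0.8, list = FALSE)
iris.trn = iris[in.train, ]   # would be passed to train()
iris.tst = iris[-in.train, ]  # would be held out for out-of-sample metrics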
# Specify fit parameters
iris.svm.fc = trainControl(method = "cv",
                           number = 5,
                           classProbs = T)
See the model notes in the comments below for a description.
#--------------------------------------
# Model 1
#--------------------------------------
# Model notes:
# warnings() == F
# sigma == constant
# C == varied, 9 times, each = 1
# Weight == NULL
# Build model
ptm = proc.time()
set.seed(123)
iris.svm.m1 = train(x = iris[, -5],
                    y = iris[, 5],
                    method = "svmRadial",
                    preProcess = c("center", "scale"),
                    trControl = iris.svm.fc,
                    tuneLength = 9)
proc.time() - ptm; rm(ptm)
## user system elapsed
## 3.56 0.02 3.64
# In-sample summary
iris.svm.m1$finalModel
## Support Vector Machine object of class "ksvm"
##
## SV type: C-svc (classification)
## parameter : cost C = 0.25
##
## Gaussian Radial Basis kernel function.
## Hyperparameter : sigma = 0.805347801803707
##
## Number of Support Vectors : 87
##
## Objective Function Value : -3.9244 -4.3221 -9.9467
## Training error : 0.04
## Probability model included.
iris.svm.m1$results
## sigma C Accuracy Kappa AccuracySD KappaSD
## 1 0.8053478 0.25 0.9466667 0.92 0.05055250 0.07582875
## 2 0.8053478 0.50 0.9466667 0.92 0.05055250 0.07582875
## 3 0.8053478 1.00 0.9466667 0.92 0.04472136 0.06708204
## 4 0.8053478 2.00 0.9466667 0.92 0.06055301 0.09082951
## 5 0.8053478 4.00 0.9400000 0.91 0.05962848 0.08944272
## 6 0.8053478 8.00 0.9400000 0.91 0.04346135 0.06519202
## 7 0.8053478 16.00 0.9466667 0.92 0.04472136 0.06708204
## 8 0.8053478 32.00 0.9333333 0.90 0.05270463 0.07905694
## 9 0.8053478 64.00 0.9400000 0.91 0.05477226 0.08215838
# Plots
plot(iris.svm.m1, main = "Accuracy: iris.svm.m1")
plot(varImp(iris.svm.m1), main = "Var Imp: iris.svm.m1")
# In-sample fit
iris.svm.m1.trn.pred = predict(iris.svm.m1, newdata = iris[, -5])
iris.svm.m1.trn.cm = confusionMatrix(iris.svm.m1.trn.pred, iris$Species)
iris.svm.m1.trn.cm$table
## Reference
## Prediction setosa versicolor virginica
## setosa 50 0 0
## versicolor 0 47 3
## virginica 0 3 47
iris.svm.m1.trn.cm$overall[1:2]
## Accuracy Kappa
## 0.96 0.94
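The grid above is what tuneLength = 9 expands to for "svmRadial": sigma is held at a single value (estimated via kernlab::sigest()) and C is varied over powers of 2. A minimal sketch of the same search written as an explicit grid, if finer control were wanted; the sigma value below is illustrative, taken from the output above:
iris.svm.m1.grid = expand.grid(sigma = 0.8053478,
                               C = 2^(-2:6))
# Pass tuneGrid = iris.svm.m1.grid to train() in place of tuneLength = 9.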
See the model notes in the comments below for a description.
#--------------------------------------
# Model 2
#--------------------------------------
# Model notes:
# warnings() == F
# sigma == NULL
# C == varied, 9 times, each = 1
# Weight == NULL
# Build model
ptm = proc.time()
set.seed(123)
iris.svm.m2 = train(x = iris[, -5],
                    y = iris[, 5],
                    method = "svmRadialCost",
                    preProcess = c("center", "scale"),
                    trControl = iris.svm.fc,
                    tuneLength = 9)
proc.time() - ptm; rm(ptm)
## user system elapsed
## 3.14 0.00 3.24
# In-sample summary
iris.svm.m2$finalModel
## Support Vector Machine object of class "ksvm"
##
## SV type: C-svc (classification)
## parameter : cost C = 0.5
##
## Gaussian Radial Basis kernel function.
## Hyperparameter : sigma = 0.771020422139363
##
## Number of Support Vectors : 66
##
## Objective Function Value : -4.2431 -4.695 -14.3301
## Training error : 0.02
## Probability model included.
iris.svm.m2$results
## C Accuracy Kappa AccuracySD KappaSD
## 1 0.25 0.9266667 0.89 0.04346135 0.06519202
## 2 0.50 0.9533333 0.93 0.03800585 0.05700877
## 3 1.00 0.9400000 0.91 0.05477226 0.08215838
## 4 2.00 0.9400000 0.91 0.05477226 0.08215838
## 5 4.00 0.9400000 0.91 0.04346135 0.06519202
## 6 8.00 0.9400000 0.91 0.04346135 0.06519202
## 7 16.00 0.9400000 0.91 0.05477226 0.08215838
## 8 32.00 0.9400000 0.91 0.04346135 0.06519202
## 9 64.00 0.9333333 0.90 0.05270463 0.07905694
# Plots
plot(iris.svm.m2, main = "Accuracy: iris.svm.m2")
plot(varImp(iris.svm.m2), main = "Var Imp: iris.svm.m2")
# In-sample fit
iris.svm.m2.trn.pred = predict(iris.svm.m2, newdata = iris[, -5])
iris.svm.m2.trn.cm = confusionMatrix(iris.svm.m2.trn.pred, iris$Species)
iris.svm.m2.trn.cm$table
## Reference
## Prediction setosa versicolor virginica
## setosa 50 0 0
## versicolor 0 48 1
## virginica 0 2 49
iris.svm.m2.trn.cm$overall[1:2]
## Accuracy Kappa
## 0.98 0.97
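Unlike Model 1, "svmRadialCost" tunes only C; sigma is estimated internally by ksvm() on each fit, which is why it does not appear in the results grid. A minimal sketch of that estimation step called directly (the values returned will differ slightly from the sigma reported above):
kernlab::sigest(as.matrix(iris[, -5]), scaled = TRUE)  # low / middle / high estimates of sigma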
See the model notes in the comments below for a description.
#--------------------------------------
# Model 3
#--------------------------------------
# Model notes:
# warnings() == F
# sigma == varied, 6 times, each = 9
# C == varied, 9 times, each = 6
# Weight == NULL
# Build model
ptm = proc.time()
set.seed(123)
iris.svm.m3 = train(x = iris[, -5],
                    y = iris[, 5],
                    method = "svmRadialSigma",
                    preProcess = c("center", "scale"),
                    trControl = iris.svm.fc,
                    tuneLength = 9)
proc.time() - ptm; rm(ptm)
## user system elapsed
## 15.53 0.00 15.93
# In-sample summary
iris.svm.m3$finalModel
## Support Vector Machine object of class "ksvm"
##
## SV type: C-svc (classification)
## parameter : cost C = 0.5
##
## Gaussian Radial Basis kernel function.
## Hyperparameter : sigma = 0.358006842954026
##
## Number of Support Vectors : 67
##
## Objective Function Value : -3.2123 -3.1888 -15.3711
## Training error : 0.033333
## Probability model included.
iris.svm.m3$results
## sigma C Accuracy Kappa AccuracySD KappaSD
## 1 0.05977954 0.25 0.9000000 0.85 0.06666667 0.10000000
## 2 0.05977954 0.50 0.9533333 0.93 0.05055250 0.07582875
## 3 0.05977954 1.00 0.9400000 0.91 0.04346135 0.06519202
## 4 0.05977954 2.00 0.9533333 0.93 0.02981424 0.04472136
## 5 0.05977954 4.00 0.9600000 0.94 0.03651484 0.05477226
## 6 0.05977954 8.00 0.9466667 0.92 0.03800585 0.05700877
## 7 0.05977954 16.00 0.9533333 0.93 0.02981424 0.04472136
## 8 0.05977954 32.00 0.9533333 0.93 0.03800585 0.05700877
## 9 0.05977954 64.00 0.9600000 0.94 0.03651484 0.05477226
## 10 0.35800684 0.25 0.9400000 0.91 0.05477226 0.08215838
## 11 0.35800684 0.50 0.9600000 0.94 0.03651484 0.05477226
## 12 0.35800684 1.00 0.9533333 0.93 0.03800585 0.05700877
## 13 0.35800684 2.00 0.9533333 0.93 0.05055250 0.07582875
## 14 0.35800684 4.00 0.9466667 0.92 0.04472136 0.06708204
## 15 0.35800684 8.00 0.9600000 0.94 0.04346135 0.06519202
## 16 0.35800684 16.00 0.9600000 0.94 0.04346135 0.06519202
## 17 0.35800684 32.00 0.9466667 0.92 0.03800585 0.05700877
## 18 0.35800684 64.00 0.9333333 0.90 0.05270463 0.07905694
## 19 0.65623415 0.25 0.9400000 0.91 0.05477226 0.08215838
## 20 0.65623415 0.50 0.9466667 0.92 0.05055250 0.07582875
## 21 0.65623415 1.00 0.9400000 0.91 0.05477226 0.08215838
## 22 0.65623415 2.00 0.9400000 0.91 0.05477226 0.08215838
## 23 0.65623415 4.00 0.9466667 0.92 0.06055301 0.09082951
## 24 0.65623415 8.00 0.9466667 0.92 0.05055250 0.07582875
## 25 0.65623415 16.00 0.9400000 0.91 0.05477226 0.08215838
## 26 0.65623415 32.00 0.9333333 0.90 0.05270463 0.07905694
## 27 0.65623415 64.00 0.9266667 0.89 0.05962848 0.08944272
## 28 0.95446145 0.25 0.9333333 0.90 0.05270463 0.07905694
## 29 0.95446145 0.50 0.9466667 0.92 0.05055250 0.07582875
## 30 0.95446145 1.00 0.9400000 0.91 0.05477226 0.08215838
## 31 0.95446145 2.00 0.9400000 0.91 0.05477226 0.08215838
## 32 0.95446145 4.00 0.9400000 0.91 0.05962848 0.08944272
## 33 0.95446145 8.00 0.9333333 0.90 0.05270463 0.07905694
## 34 0.95446145 16.00 0.9466667 0.92 0.04472136 0.06708204
## 35 0.95446145 32.00 0.9333333 0.90 0.05270463 0.07905694
## 36 0.95446145 64.00 0.9333333 0.90 0.04082483 0.06123724
## 37 1.25268876 0.25 0.9400000 0.91 0.04944132 0.07416198
## 38 1.25268876 0.50 0.9466667 0.92 0.05055250 0.07582875
## 39 1.25268876 1.00 0.9466667 0.92 0.04472136 0.06708204
## 40 1.25268876 2.00 0.9400000 0.91 0.05477226 0.08215838
## 41 1.25268876 4.00 0.9333333 0.90 0.04082483 0.06123724
## 42 1.25268876 8.00 0.9333333 0.90 0.05270463 0.07905694
## 43 1.25268876 16.00 0.9400000 0.91 0.04346135 0.06519202
## 44 1.25268876 32.00 0.9400000 0.91 0.04346135 0.06519202
## 45 1.25268876 64.00 0.9400000 0.91 0.04346135 0.06519202
## 46 1.55091607 0.25 0.9400000 0.91 0.04944132 0.07416198
## 47 1.55091607 0.50 0.9466667 0.92 0.05055250 0.07582875
## 48 1.55091607 1.00 0.9466667 0.92 0.04472136 0.06708204
## 49 1.55091607 2.00 0.9400000 0.91 0.04346135 0.06519202
## 50 1.55091607 4.00 0.9400000 0.91 0.04346135 0.06519202
## 51 1.55091607 8.00 0.9333333 0.90 0.05270463 0.07905694
## 52 1.55091607 16.00 0.9333333 0.90 0.05270463 0.07905694
## 53 1.55091607 32.00 0.9400000 0.91 0.04346135 0.06519202
## 54 1.55091607 64.00 0.9333333 0.90 0.05270463 0.07905694
# Plots
plot(iris.svm.m3, main = "Accuracy: iris.svm.m3")
plot(varImp(iris.svm.m3), main = "Var Imp: iris.svm.m3")
# In-sample fit
iris.svm.m3.trn.pred = predict(iris.svm.m3, newdata = iris[, -5])
iris.svm.m3.trn.cm = confusionMatrix(iris.svm.m3.trn.pred, iris$Species)
iris.svm.m3.trn.cm$table
## Reference
## Prediction setosa versicolor virginica
## setosa 50 0 0
## versicolor 0 48 2
## virginica 0 2 48
iris.svm.m3.trn.cm$overall[1:2]
## Accuracy Kappa
## 0.9733333 0.9600000
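With both sigma and C varied, the winning pair and its resampled performance can be read straight off the train object, as a quick check against the finalModel printed above:
iris.svm.m3$bestTune       # selected sigma / C pair
getTrainPerf(iris.svm.m3)  # resampled Accuracy and Kappa for that pair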
See the model notes in the comments below for a description.
#--------------------------------------
# Model 4
#--------------------------------------
# Model notes:
# warnings() == T
# sigma == constant
# C == varied, 9 times
# Weight == varied, 9 times; only Weight == 1 completes (all other rows are NaN)
# Build model
ptm = proc.time()
set.seed(123)
iris.svm.m4 = train(x = iris[, -5],
                    y = iris[, 5],
                    method = "svmRadialWeights",
                    preProcess = c("center", "scale"),
                    trControl = iris.svm.fc,
                    tuneLength = 9)
proc.time() - ptm; rm(ptm)
## user system elapsed
## 5.58 0.00 6.00
# In-sample summary
iris.svm.m4$finalModel
## Support Vector Machine object of class "ksvm"
##
## SV type: C-svc (classification)
## parameter : cost C = 0.5
##
## Gaussian Radial Basis kernel function.
## Hyperparameter : sigma = 0.805347801803707
##
## Number of Support Vectors : 66
##
## Objective Function Value : -4.3396 -4.8137 -14.3118
## Training error : 0.02
## Probability model included.
iris.svm.m4$results
## sigma C Weight Accuracy Kappa AccuracySD KappaSD
## 1 0.8053478 0.25 1 0.9400000 0.91 0.05477226 0.08215838
## 2 0.8053478 0.25 2 NaN NaN NA NA
## 3 0.8053478 0.25 3 NaN NaN NA NA
## 4 0.8053478 0.25 4 NaN NaN NA NA
## 5 0.8053478 0.25 5 NaN NaN NA NA
## 6 0.8053478 0.25 6 NaN NaN NA NA
## 7 0.8053478 0.25 7 NaN NaN NA NA
## 8 0.8053478 0.25 8 NaN NaN NA NA
## 9 0.8053478 0.25 9 NaN NaN NA NA
## 10 0.8053478 0.50 1 0.9466667 0.92 0.05055250 0.07582875
## 11 0.8053478 0.50 2 NaN NaN NA NA
## 12 0.8053478 0.50 3 NaN NaN NA NA
## 13 0.8053478 0.50 4 NaN NaN NA NA
## 14 0.8053478 0.50 5 NaN NaN NA NA
## 15 0.8053478 0.50 6 NaN NaN NA NA
## 16 0.8053478 0.50 7 NaN NaN NA NA
## 17 0.8053478 0.50 8 NaN NaN NA NA
## 18 0.8053478 0.50 9 NaN NaN NA NA
## 19 0.8053478 1.00 1 0.9400000 0.91 0.05477226 0.08215838
## 20 0.8053478 1.00 2 NaN NaN NA NA
## 21 0.8053478 1.00 3 NaN NaN NA NA
## 22 0.8053478 1.00 4 NaN NaN NA NA
## 23 0.8053478 1.00 5 NaN NaN NA NA
## 24 0.8053478 1.00 6 NaN NaN NA NA
## 25 0.8053478 1.00 7 NaN NaN NA NA
## 26 0.8053478 1.00 8 NaN NaN NA NA
## 27 0.8053478 1.00 9 NaN NaN NA NA
## 28 0.8053478 2.00 1 0.9400000 0.91 0.05477226 0.08215838
## 29 0.8053478 2.00 2 NaN NaN NA NA
## 30 0.8053478 2.00 3 NaN NaN NA NA
## 31 0.8053478 2.00 4 NaN NaN NA NA
## 32 0.8053478 2.00 5 NaN NaN NA NA
## 33 0.8053478 2.00 6 NaN NaN NA NA
## 34 0.8053478 2.00 7 NaN NaN NA NA
## 35 0.8053478 2.00 8 NaN NaN NA NA
## 36 0.8053478 2.00 9 NaN NaN NA NA
## 37 0.8053478 4.00 1 0.9400000 0.91 0.05962848 0.08944272
## 38 0.8053478 4.00 2 NaN NaN NA NA
## 39 0.8053478 4.00 3 NaN NaN NA NA
## 40 0.8053478 4.00 4 NaN NaN NA NA
## 41 0.8053478 4.00 5 NaN NaN NA NA
## 42 0.8053478 4.00 6 NaN NaN NA NA
## 43 0.8053478 4.00 7 NaN NaN NA NA
## 44 0.8053478 4.00 8 NaN NaN NA NA
## 45 0.8053478 4.00 9 NaN NaN NA NA
## 46 0.8053478 8.00 1 0.9333333 0.90 0.05270463 0.07905694
## 47 0.8053478 8.00 2 NaN NaN NA NA
## 48 0.8053478 8.00 3 NaN NaN NA NA
## 49 0.8053478 8.00 4 NaN NaN NA NA
## 50 0.8053478 8.00 5 NaN NaN NA NA
## 51 0.8053478 8.00 6 NaN NaN NA NA
## 52 0.8053478 8.00 7 NaN NaN NA NA
## 53 0.8053478 8.00 8 NaN NaN NA NA
## 54 0.8053478 8.00 9 NaN NaN NA NA
## 55 0.8053478 16.00 1 0.9400000 0.91 0.05477226 0.08215838
## 56 0.8053478 16.00 2 NaN NaN NA NA
## 57 0.8053478 16.00 3 NaN NaN NA NA
## 58 0.8053478 16.00 4 NaN NaN NA NA
## 59 0.8053478 16.00 5 NaN NaN NA NA
## 60 0.8053478 16.00 6 NaN NaN NA NA
## 61 0.8053478 16.00 7 NaN NaN NA NA
## 62 0.8053478 16.00 8 NaN NaN NA NA
## 63 0.8053478 16.00 9 NaN NaN NA NA
## 64 0.8053478 32.00 1 0.9333333 0.90 0.05270463 0.07905694
## 65 0.8053478 32.00 2 NaN NaN NA NA
## 66 0.8053478 32.00 3 NaN NaN NA NA
## 67 0.8053478 32.00 4 NaN NaN NA NA
## 68 0.8053478 32.00 5 NaN NaN NA NA
## 69 0.8053478 32.00 6 NaN NaN NA NA
## 70 0.8053478 32.00 7 NaN NaN NA NA
## 71 0.8053478 32.00 8 NaN NaN NA NA
## 72 0.8053478 32.00 9 NaN NaN NA NA
## 73 0.8053478 64.00 1 0.9333333 0.90 0.05270463 0.07905694
## 74 0.8053478 64.00 2 NaN NaN NA NA
## 75 0.8053478 64.00 3 NaN NaN NA NA
## 76 0.8053478 64.00 4 NaN NaN NA NA
## 77 0.8053478 64.00 5 NaN NaN NA NA
## 78 0.8053478 64.00 6 NaN NaN NA NA
## 79 0.8053478 64.00 7 NaN NaN NA NA
## 80 0.8053478 64.00 8 NaN NaN NA NA
## 81 0.8053478 64.00 9 NaN NaN NA NA
# Plots
plot(iris.svm.m4, main = "Accuracy: iris.svm.m4")
plot(varImp(iris.svm.m4), main = "Var Imp: iris.svm.m4")
# In-sample fit
iris.svm.m4.trn.pred = predict(iris.svm.m4, newdata = iris[, -5])
iris.svm.m4.trn.cm = confusionMatrix(iris.svm.m4.trn.pred, iris$Species)
iris.svm.m4.trn.cm$table
## Reference
## Prediction setosa versicolor virginica
## setosa 50 0 0
## versicolor 0 48 1
## virginica 0 2 49
iris.svm.m4.trn.cm$overall[1:2]
## Accuracy Kappa
## 0.98 0.97
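Because most Weight settings failed (the NaN rows above), a minimal way to keep only the tuning rows that completed and to review why the others did not:
subset(iris.svm.m4$results, !is.nan(Accuracy))  # completed rows only
warnings()                                      # messages from the failed fits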
See the model notes in the comments below for a description.
#--------------------------------------
# Model 5
#--------------------------------------
# Model notes:
# warnings() == F
# sigma == varied, 30 times, each = 1
# C == varied, 30 times, each = 1
# Weight == NULL
# Specify fit parameters
iris.svm.m5.fc = trainControl(method = "cv",
                              number = 5,
                              classProbs = T,
                              search = "random")
# Build model
ptm = proc.time()
set.seed(123)
iris.svm.m5 = train(x = iris[, -5],
                    y = iris[, 5],
                    method = "svmRadialSigma",
                    preProcess = c("center", "scale"),
                    trControl = iris.svm.m5.fc,
                    tuneLength = 30)
proc.time() - ptm; rm(ptm)
## user system elapsed
## 8.63 0.01 9.25
# In-sample summary
iris.svm.m5$finalModel
## Support Vector Machine object of class "ksvm"
##
## SV type: C-svc (classification)
## parameter : cost C = 20.0917932217118
##
## Gaussian Radial Basis kernel function.
## Hyperparameter : sigma = 0.0121057487678656
##
## Number of Support Vectors : 36
##
## Objective Function Value : -38.9619 -13.9598 -442.5868
## Training error : 0.013333
## Probability model included.
iris.svm.m5$results
## sigma C Accuracy Kappa AccuracySD KappaSD
## 1 0.005559917 24.6230114 0.9600000 0.94 0.02788867 0.04183300
## 2 0.005615149 0.6131365 0.8600000 0.79 0.07958224 0.11937336
## 3 0.006843156 884.8643390 0.9400000 0.91 0.02788867 0.04183300
## 4 0.006907347 2.4346906 0.9200000 0.88 0.06912147 0.10368221
## 5 0.008156860 0.3244572 0.8533333 0.78 0.07673910 0.11510864
## 6 0.012105749 20.0917932 0.9666667 0.95 0.03333333 0.05000000
## 7 0.017212926 6.6165406 0.9600000 0.94 0.02788867 0.04183300
## 8 0.018610198 44.0358465 0.9533333 0.93 0.03800585 0.05700877
## 9 0.036015640 1.7712732 0.9400000 0.91 0.04346135 0.06519202
## 10 0.057815278 730.1466331 0.9466667 0.92 0.04472136 0.06708204
## 11 0.081708863 0.9013597 0.9400000 0.91 0.04346135 0.06519202
## 12 0.092768011 0.3388392 0.9466667 0.92 0.06055301 0.09082951
## 13 0.093371792 4.9115434 0.9533333 0.93 0.04472136 0.06708204
## 14 0.123951273 2.3311309 0.9533333 0.93 0.05055250 0.07582875
## 15 0.206779079 25.2610679 0.9600000 0.94 0.04346135 0.06519202
## 16 0.228381735 0.1081475 0.9133333 0.87 0.06497863 0.09746794
## 17 0.260668468 5.2361202 0.9533333 0.93 0.03800585 0.05700877
## 18 0.285974066 813.3886629 0.9400000 0.91 0.04346135 0.06519202
## 19 0.878209540 0.8202080 0.9533333 0.93 0.03800585 0.05700877
## 20 1.602140462 0.1702887 0.9266667 0.89 0.09249625 0.13874437
## 21 1.727948320 7.4149908 0.9333333 0.90 0.05270463 0.07905694
## 22 1.970093673 182.3813801 0.9400000 0.91 0.05962848 0.08944272
## 23 2.951843012 67.0405019 0.9333333 0.90 0.05270463 0.07905694
## 24 3.087233091 19.1401343 0.9333333 0.90 0.05270463 0.07905694
## 25 3.581284871 1.4673591 0.9333333 0.90 0.05270463 0.07905694
## 26 4.680919388 861.2287744 0.9333333 0.90 0.05270463 0.07905694
## 27 5.421950684 0.1292381 0.8666667 0.80 0.10540926 0.15811388
## 28 5.561504283 20.3853835 0.9200000 0.88 0.07673910 0.11510864
## 29 8.930132700 184.6730840 0.8866667 0.83 0.08027730 0.12041595
## 30 17.742305737 32.7080254 0.8400000 0.76 0.10110501 0.15165751
# In-sample fit
iris.svm.m5.trn.pred = predict(iris.svm.m5, newdata = iris[, -5])
iris.svm.m5.trn.cm = confusionMatrix(iris.svm.m5.trn.pred, iris$Species)
iris.svm.m5.trn.cm$table
## Reference
## Prediction setosa versicolor virginica
## setosa 50 0 0
## versicolor 0 49 1
## virginica 0 1 49
iris.svm.m5.trn.cm$overall[1:2]
## Accuracy Kappa
## 0.9866667 0.9800000
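In addition to the in-sample comparison built below, the cross-validated results already stored in each train object can be compared fold by fold; a minimal sketch with caret::resamples(), which applies here because every model was fit with the same seed and the same 5-fold setup:
iris.svm.resamp = resamples(list(M1 = iris.svm.m1,
                                 M2 = iris.svm.m2,
                                 M3 = iris.svm.m3,
                                 M4 = iris.svm.m4,
                                 M5 = iris.svm.m5))
summary(iris.svm.resamp)  # resampled Accuracy and Kappa, per model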
The table below compares the in-sample performance of the five models.
#--------------------------------------
# Model Comparison
#--------------------------------------
# Model Types
model.types = rep("SVM", times = 5)
# Model Names
model.names = c("M1", "M2", "M3", "M4", "M5")
# Accuracy, Train
model.trn.acc = rbind(iris.svm.m1.trn.cm$overall[1],
                      iris.svm.m2.trn.cm$overall[1],
                      iris.svm.m3.trn.cm$overall[1],
                      iris.svm.m4.trn.cm$overall[1],
                      iris.svm.m5.trn.cm$overall[1])
# Kappa, Train
model.trn.kpp = rbind(iris.svm.m1.trn.cm$overall[2],
                      iris.svm.m2.trn.cm$overall[2],
                      iris.svm.m3.trn.cm$overall[2],
                      iris.svm.m4.trn.cm$overall[2],
                      iris.svm.m5.trn.cm$overall[2])
# Data Frame
model.comp = data.frame(model.types,
                        model.names,
                        model.trn.acc,
                        model.trn.kpp)
rownames(model.comp) = 1:nrow(model.comp)
colnames(model.comp) = c("Model Type",
                         "Model Name",
                         "Train: Accuracy",
                         "Train: Kappa")
pander(model.comp, caption = "", alignment = "left")
| Model Type | Model Name | Train: Accuracy | Train: Kappa |
|---|---|---|---|
| SVM | M1 | 0.96 | 0.94 |
| SVM | M2 | 0.98 | 0.97 |
| SVM | M3 | 0.9733 | 0.96 |
| SVM | M4 | 0.98 | 0.97 |
| SVM | M5 | 0.9867 | 0.98 |
sessionInfo()
## R version 3.3.1 (2016-06-21)
## Platform: x86_64-w64-mingw32/x64 (64-bit)
## Running under: Windows 10 x64 (build 10586)
##
## locale:
## [1] LC_COLLATE=English_United States.1252
## [2] LC_CTYPE=English_United States.1252
## [3] LC_MONETARY=English_United States.1252
## [4] LC_NUMERIC=C
## [5] LC_TIME=English_United States.1252
##
## attached base packages:
## [1] stats graphics grDevices utils datasets methods base
##
## other attached packages:
## [1] kernlab_0.9-24 pander_0.6.0 caret_6.0-71 ggplot2_2.1.0
## [5] lattice_0.20-33
##
## loaded via a namespace (and not attached):
## [1] Rcpp_0.12.6 compiler_3.3.1 formatR_1.4
## [4] nloptr_1.0.4 plyr_1.8.4 class_7.3-14
## [7] iterators_1.0.8 tools_3.3.1 digest_0.6.10
## [10] lme4_1.1-12 evaluate_0.9 nlme_3.1-128
## [13] gtable_0.2.0 mgcv_1.8-13 Matrix_1.2-6
## [16] foreach_1.4.3 yaml_2.1.13 parallel_3.3.1
## [19] SparseM_1.7 e1071_1.6-7 stringr_1.0.0
## [22] knitr_1.13.1 pROC_1.8 MatrixModels_0.4-1
## [25] stats4_3.3.1 grid_3.3.1 nnet_7.3-12
## [28] rmarkdown_1.0 minqa_1.2.4 reshape2_1.4.1
## [31] car_2.1-2 magrittr_1.5 scales_0.4.0
## [34] codetools_0.2-14 htmltools_0.3.5 MASS_7.3-45
## [37] splines_3.3.1 pbkrtest_0.4-6 colorspace_1.2-6
## [40] quantreg_5.26 stringi_1.1.1 munsell_0.4.3