library(caret)
## Loading required package: lattice
## Loading required package: ggplot2
library(data.table)
library(rpart)
library(rpart.plot)
library(RColorBrewer)
library(rattle)
## Loading required package: RGtk2
## Rattle: A free graphical interface for data mining with R.
## Version 3.5.0 Copyright (c) 2006-2015 Togaware Pty Ltd.
## Type 'rattle()' to shake, rattle, and roll your data.
library(randomForest)
## randomForest 4.6-12
## Type rfNews() to see new features/changes/bug fixes.
library(knitr)
library(mice)
## Loading required package: Rcpp
## mice 2.22 2014-06-10
library(ROCR)
## Loading required package: gplots
##
## Attaching package: 'gplots'
##
## The following object is masked from 'package:stats':
##
## lowess
library(outliers)
##
## Attaching package: 'outliers'
##
## The following object is masked from 'package:randomForest':
##
## outlier
library(plyr)
library(boot)
##
## Attaching package: 'boot'
##
## The following object is masked from 'package:lattice':
##
## melanoma
training <- read.csv("train.csv", sep=",") ##Having inspected the dataset, the columns are seperated with comas.
validation <- read.csv("test.csv", sep=",")
summary(training)
summary(validation)
lapply(training,class)
lapply(validation,class)
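A more compact way to run the same class check is str(); a small aside, assuming the data frames as loaded above.
##Compact structure overview of both datasets
str(training)
str(validation)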
We now apply appropriate transformations and feature engineering to both datasets, with exploratory data analysis to reveal skewness, outliers, etc.
##PassengerID is an ID variable, hence it must be removed.
training <- training[,-1]
validation <- validation[,-1]
##Survived is a categorical variable, so its class should be transformed to factor.
class(training$Survived) ##It is an integer, so conversion is needed
## [1] "integer"
training$Survived <- as.factor(training$Survived)
table(training$Survived)
##
## 0 1
## 549 342
sum(is.na(training$Survived))/length(training$Survived)#No NAs
## [1] 0
##Pclass is a categorical variable, so its class should be transformed to factor.
##Training set
class(training$Pclass)
## [1] "integer"
training$Pclass <- as.factor(training$Pclass)
table(training$Pclass)
##
## 1 2 3
## 216 184 491
sum(is.na(training$Pclass))/length(training$Pclass) #No NAs
## [1] 0
##Validation set
class(validation$Pclass)
## [1] "integer"
validation$Pclass <- as.factor(validation$Pclass)
table(validation$Pclass)
##
## 1 2 3
## 107 93 218
sum(is.na(validation$Pclass))/length(validation$Pclass) #No NAs
## [1] 0
##Name variable is useless, hence it gets removed from both datasets
training <- training[,-3]
validation <- validation[,-2]
##Sex variable is categorical, and its class should be factor.
class(training$Sex) #Already of a factor type, no transformation necessary.
## [1] "factor"
table(training$Sex)
##
## female male
## 314 577
sum(is.na(training$Sex))/length(training$Sex)#No NAs
## [1] 0
class(validation$Sex) #Already of a factor type, no transformation necessary.
## [1] "factor"
table(validation$Sex)
##
## female male
## 152 266
sum(is.na(validation$Sex))/length(validation$Sex)#No NAs
## [1] 0
##Age should be numeric, which it already is, hence no action needed
class(training$Age) #Already of a numeric type, no transformation necessary
## [1] "numeric"
summary(training$Age, na.rm=TRUE)
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## 0.42 20.12 28.00 29.70 38.00 80.00 177
hist(training$Age) #Slightly right-skewed, nothing to worry about.
boxplot(training$Age) #A few outliers, no big deal
sum(is.na(training$Age))/length(training$Age) # About 20% missing values. We will compare NA imputation against complete-row removal using CV accuracy.
## [1] 0.1986532
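As a quick aside, missingness can also be checked for every column at once; a minimal sketch on the training data.
##Proportion of NAs per column, in one call
colMeans(is.na(training))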
class(validation$Age) #Already of a numeric type, no transformation necessary
## [1] "numeric"
summary(validation$Age, na.rm=TRUE)
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## 0.17 21.00 27.00 30.27 39.00 76.00 86
hist(validation$Age) #Slightly right-skewed, nothing to worry about.
boxplot(validation$Age) #A few outliers, no big deal
sum(is.na(validation$Age))/length(validation$Age) # About 21% missing values here as well.
## [1] 0.2057416
##SibSp should be of integer class, which it already is
class(training$SibSp)
## [1] "integer"
table(training$SibSp)
##
## 0 1 2 3 4 5 8
## 608 209 28 16 18 5 7
sum(is.na(training$SibSp))/length(training$SibSp)#No NAs
## [1] 0
class(validation$SibSp)
## [1] "integer"
table(validation$SibSp)
##
## 0 1 2 3 4 5 8
## 283 110 14 4 4 1 2
sum(is.na(validation$SibSp))/length(validation$SibSp)#No NAs
## [1] 0
##Parch should be of integer class, which it already is, hence no action necessary
class(training$Parch) #Already of the correct type, no transformation necessary
## [1] "integer"
table(training$Parch)
##
## 0 1 2 3 4 5 6
## 678 118 80 5 4 5 1
sum(is.na(training$Parch))/length(training$Parch) #No NAs
## [1] 0
class(validation$Parch) #Already of the correct type, no transformation necessary
## [1] "integer"
table(validation$Parch)
sum(is.na(validation$Parch))/length(validation$Parch) #No NAs
## [1] 0
##Ticket variable is useless, hence it gets removed from both datasets.
training <- training[,-7]
validation <- validation[,-6]
##Fare should be numeric
class(training$Fare) #Already registered as numeric, hence no action necessary
## [1] "numeric"
summary(training$Fare)
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0.00 7.91 14.45 32.20 31.00 512.30
hist(training$Fare) #Heavily skewed; a logarithmic transformation could prove useful. Will check against CV accuracy.
boxplot(training$Fare) #One extreme outlier, considerably out of the normal range. We may remove this data point.
sum(is.na(training$Fare))/length(training$Fare)#No NAs
## [1] 0
which.max(training$Fare)
## [1] 259
training$Fare[259]
## [1] 512.3292
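A hedged aside: the already-loaded outliers package can locate the same extreme fare, and the log transformation suggested above can be previewed with log1p, which handles the zero fares.
outlier(training$Fare) ##value furthest from the mean; should match 512.3292
hist(log1p(training$Fare)) ##preview of the suggested log transform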
class(validation$Fare) #Already registered as numeric, hence no action necessary
## [1] "numeric"
summary(validation$Fare)
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## 0.000 7.896 14.450 35.630 31.500 512.300 1
hist(validation$Fare) #Heavily skewed; a logarithmic transformation could prove useful. Will check against CV accuracy.
boxplot(validation$Fare) #One extreme outlier, considerably out of the normal range.
sum(is.na(validation$Fare))/length(validation$Fare)#One NA
## [1] 0.002392344
which.max(validation$Fare)
## [1] 344
validation$Fare[344]
## [1] 512.3292
##Cabin should be transformed to discriminate between passengers who had a cabin and those who did not. Hence, we recode it as a binary factor with levels 0 and 1.
class(training$Cabin)
## [1] "factor"
training$Cabin <- as.character(training$Cabin)
training$Cabin[training$Cabin==""] <- 0
training$Cabin[!training$Cabin==0] <- 1
training$Cabin <- as.factor(training$Cabin)
table(training$Cabin)
##
## 0 1
## 687 204
sum(is.na(training$Cabin))/length(training$Cabin)#No NAs
## [1] 0
class(validation$Cabin)
## [1] "factor"
validation$Cabin <- as.character(validation$Cabin)
validation$Cabin[validation$Cabin==""] <- 0
validation$Cabin[!validation$Cabin==0] <- 1
validation$Cabin <- as.factor(validation$Cabin)
table(validation$Cabin)
##
## 0 1
## 327 91
sum(is.na(validation$Cabin))/length(validation$Cabin)#No NAs
## [1] 0
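As an aside, the same recode could be written in one step on the raw Cabin strings; shown commented out since Cabin has already been converted above.
# training$Cabin   <- factor(ifelse(training$Cabin == "", 0, 1))
# validation$Cabin <- factor(ifelse(validation$Cabin == "", 0, 1))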
##Embarked
class(training$Embarked) # Already of a factor type, no transformation necessary
## [1] "factor"
table(training$Embarked) # The empty string was treated as a factor level; transformation needed
##
## C Q S
## 2 168 77 644
training$Embarked[training$Embarked==""] <- 0 ##0 is not a valid level, so the empty strings are coerced to NA (see warning)
## Warning in `[<-.factor`(`*tmp*`, training$Embarked == "", value =
## structure(c(4L, : invalid factor level, NA generated
sum(is.na(training$Embarked))/length(training$Embarked) ##Very few NAs
## [1] 0.002244669
class(validation$Embarked) # Already of a factor type, no transformation necessary
## [1] "factor"
table(validation$Embarked)
##
## C Q S
## 102 46 270
sum(is.na(validation$Embarked))/length(validation$Embarked) ##No NAs
## [1] 0
Next, we check for duplicate records.
##Let's count how many duplicate records there are, if any.
sum(duplicated(training)) ##A significant number of duplicate records
## [1] 107
There are 107 duplicate records, a significant number; they must be removed from the training dataset.
training_unique <- unique(training)
rm(training)##The initial training set is no longer needed.
The summary results show that missing values exist, most of them in the Age variable. Two strategies are compared. The first imputes the missing Age values with the variable's mean and then omits any remaining rows containing NAs. The second deletes all rows with missing values.
##Imputing Age
training_Age <- training_unique
training_Age$Age[is.na(training_unique$Age)] <- mean(training_unique$Age,na.rm=TRUE)
training_Age <- na.omit(training_Age)
validation_Age <- validation
validation_Age$Age[is.na(validation_Age$Age)] <- mean(validation_Age$Age,na.rm=TRUE)
validation_Age <- na.omit(validation_Age)
training_na <- na.omit(training_unique)
validation_na <- na.omit(validation)
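An aside: the mice package loaded at the top could provide model-based imputation instead of the simple mean; a minimal sketch under mice's defaults, where the _mice names are illustrative.
imp_mice <- mice(droplevels(training_unique), m = 5, seed = 1000, printFlag = FALSE)
training_mice <- complete(imp_mice, 1) ##first of the five completed datasets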
All following checks will be performed on training_Age; the final checks will also include training_na.
#Numeric attributes
cor(training_Age[,c(4,5,6,7)]) # Some correlation, but no action will be taken
## Age SibSp Parch Fare
## Age 1.00000000 -0.2799495 -0.1873354 0.08639407
## SibSp -0.27994952 1.0000000 0.3810158 0.13650684
## Parch -0.18733541 0.3810158 1.0000000 0.19329489
## Fare 0.08639407 0.1365068 0.1932949 1.00000000
#Factor attributes, nothing interesting
5-fold cross-validation on accuracy, averaged over 5 repetitions, will be the strategy for selecting the best possible model.
##Let's define a trainControl setting, that will remain the same for all applied models thereon
fitControl <- trainControl(## 5-fold CV
method = "repeatedcv",
number = 5,
#classProbs = TRUE,
## repeated five times
repeats = 5)
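An optional variant, not used here: selecting models on ROC instead of Accuracy requires class probabilities and class levels that are valid R names, so the 0/1 levels would first need renaming; a sketch, commented out to keep the 0/1 pipeline intact.
# levels(training_Age$Survived) <- c("No", "Yes") ##illustrative renaming
# fitControlROC <- trainControl(method = "repeatedcv", number = 5, repeats = 5,
#                               classProbs = TRUE, summaryFunction = twoClassSummary)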
Let’s set the formula including all variables
##Set the formula
formula <- Survived~Pclass+Sex+Age+SibSp+Parch+Fare+Cabin+Embarked
The criterion is a model's Accuracy = (TP+TN)/(TP+TN+FP+FN), which we aim to maximize. Under this criterion we shall compare a number of classifiers using the excellent “caret” package.
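To make the criterion concrete, a tiny worked example with hypothetical labels: three of five cases fall on the diagonal of the confusion matrix, so accuracy is 0.6.
pred <- factor(c(0, 0, 1, 1, 1)) ##hypothetical predictions
truth <- factor(c(0, 1, 1, 1, 0)) ##hypothetical true labels
tab <- table(pred, truth)
sum(diag(tab))/sum(tab) ##(TP+TN)/(TP+TN+FP+FN) = 3/5 = 0.6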
set.seed(1000)
logisticReg <- train(formula,
data = training_Age,
method = "glm",
#metric = "ROC",
trControl = fitControl)
## Warning in predict.lm(object, newdata, se.fit, scale = 1, type =
## ifelse(type == : prediction from a rank-deficient fit may be misleading
## (the same warning is repeated for each resampling fit)
logisticReg
## Generalized Linear Model
##
## 782 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 626, 624, 626, 626, 626, 625, ...
## Resampling results
##
## Accuracy Kappa Accuracy SD Kappa SD
## 0.7813753 0.5437916 0.03270276 0.06892272
##
##
##Accuracy 0.7813753 (Stand.Dev. 0.03270276)
set.seed(1000)
BayesianLogReg <- train(formula,
data = training_Age,
method = "bayesglm",
trControl = fitControl)
## Loading required package: arm
## Loading required package: MASS
## Loading required package: Matrix
## Loading required package: lme4
##
## arm (Version 1.8-6, built: 2015-7-7)
##
## Working directory is D:/Data_Science_Projects/Kaggle/Titanic
##
##
## Attaching package: 'arm'
##
## The following object is masked from 'package:boot':
##
## logit
BayesianLogReg
## Bayesian Generalized Linear Model
##
## 782 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 626, 624, 626, 626, 626, 625, ...
## Resampling results
##
## Accuracy Kappa Accuracy SD Kappa SD
## 0.7821429 0.5453747 0.03264758 0.06862032
##
##
##Accuracy 0.7821429 (Stand.Dev. 0.03264758)
set.seed(1000)
##tuning for complexity parameter (cp)
rpartTune1 <- train(training_Age[,c(2,3,4,5,6,7,8,9)], training_Age$Survived,
method = "rpart",
tuneLength = 10,
trControl = fitControl)
plot(rpartTune1)
rpartTune1
## CART
##
## 782 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 626, 624, 626, 626, 626, 625, ...
## Resampling results across tuning parameters:
##
## cp Accuracy Kappa Accuracy SD Kappa SD
## 0.00000000 0.7713981 0.5195163 0.03267479 0.06791937
## 0.04811353 0.7634656 0.5013339 0.02769522 0.06197579
## 0.09622707 0.7673117 0.5120282 0.03082519 0.06651355
## 0.14434060 0.7673117 0.5120282 0.03082519 0.06651355
## 0.19245414 0.7673117 0.5120282 0.03082519 0.06651355
## 0.24056767 0.7673117 0.5120282 0.03082519 0.06651355
## 0.28868120 0.7673117 0.5120282 0.03082519 0.06651355
## 0.33679474 0.7673117 0.5120282 0.03082519 0.06651355
## 0.38490827 0.7673117 0.5120282 0.03082519 0.06651355
## 0.43302181 0.6697641 0.2403141 0.07970959 0.23704117
##
## Accuracy was used to select the optimal model using the largest value.
## The final value used for the model was cp = 0.
##Accuracy 0.7713981 (Stand.Dev. 0.03267479)
##tuning for maximum tree depth (maxdepth); note: no set.seed() call here, so the folds differ slightly from the other models
rpartTune2 <- train(training_Age[,c(2,3,4,5,6,7,8,9)], training_Age$Survived,
method = "rpart2",
tuneLength = 10,
trControl = fitControl)
## note: only 8 possible values of the max tree depth from the initial fit.
## Truncating the grid to 8 .
plot(rpartTune2)
rpartTune2
## CART
##
## 782 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 626, 626, 625, 626, 625, 626, ...
## Resampling results across tuning parameters:
##
## maxdepth Accuracy Kappa Accuracy SD Kappa SD
## 1 0.7672775 0.5118670 0.02998430 0.06530389
## 3 0.7770015 0.5238072 0.02521525 0.05919326
## 4 0.7749600 0.5158499 0.02649617 0.05981048
## 7 0.7767353 0.5233229 0.02513619 0.05227189
## 12 0.7757064 0.5220013 0.02583147 0.05357595
## 16 0.7757064 0.5220013 0.02583147 0.05357595
## 20 0.7757064 0.5220013 0.02583147 0.05357595
## 21 0.7757064 0.5220013 0.02583147 0.05357595
##
## Accuracy was used to select the optimal model using the largest value.
## The final value used for the model was maxdepth = 3.
##Accuracy 0.7770015 (Stand.Dev. 0.02521525)
set.seed(1000)
rfGrid = expand.grid(.mtry = c(1,2,3,4,5))
randomForestFit = train(x = training_Age[,c(2,3,4,5,6,7,8,9)],
y = training_Age$Survived,
method = "rf",
trControl = fitControl,
tuneGrid = rfGrid,
ntree=30)
plot(randomForestFit)
randomForestFit
## Random Forest
##
## 782 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 626, 624, 626, 626, 626, 625, ...
## Resampling results across tuning parameters:
##
## mtry Accuracy Kappa Accuracy SD Kappa SD
## 1 0.7762926 0.5168517 0.03526116 0.07769250
## 2 0.7931474 0.5612414 0.03202731 0.06974171
## 3 0.7967534 0.5713995 0.03508362 0.07558851
## 4 0.7885857 0.5560803 0.03391657 0.07259389
## 5 0.7793679 0.5377080 0.03684067 0.07889677
##
## Accuracy was used to select the optimal model using the largest value.
## The final value used for the model was mtry = 3.
varImp(randomForestFit)
## rf variable importance
##
## Overall
## Sex 100.0000
## Age 77.5436
## Fare 76.6489
## Pclass 23.5638
## Cabin 5.0015
## SibSp 3.1171
## Parch 0.4259
## Embarked 0.0000
##Accuracy 0.7967534 (Stand.Dev. 0.03508362)
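As a side note, the importance table can be plotted, and validation-set predictions drawn straight from the fitted object; a sketch, assuming validation_Age carries the same predictor columns as the training set (rfPred is an illustrative name).
plot(varImp(randomForestFit)) ##visual version of the table above
rfPred <- predict(randomForestFit, newdata = validation_Age)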
gbmGrid <- expand.grid(interaction.depth = c(1, 2, 3),
n.trees = (1:10)*5,
shrinkage = (1:3)*0.1,
n.minobsinnode = (1:3)*10)
gbmFit <- train(formula, data = training_Age,
method = "gbm",
trControl = fitControl,
## This last option is actually one
## for gbm() that passes through
verbose = FALSE,
tuneGrid = gbmGrid)
## Loading required package: gbm
## Loading required package: survival
##
## Attaching package: 'survival'
##
## The following object is masked from 'package:boot':
##
## aml
##
## The following object is masked from 'package:caret':
##
## cluster
##
## Loading required package: splines
## Loading required package: parallel
## Loaded gbm 2.1.1
gbmFit
## Stochastic Gradient Boosting
##
## 782 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 626, 624, 626, 626, 626, 626, ...
## Resampling results across tuning parameters:
##
## shrinkage interaction.depth n.minobsinnode n.trees Accuracy
## 0.1 1 10 5 0.7659847
## 0.1 1 10 10 0.7672586
## 0.1 1 10 15 0.7672586
## 0.1 1 10 20 0.7664894
## 0.1 1 10 25 0.7675150
## 0.1 1 10 30 0.7695582
## 0.1 1 10 35 0.7675232
## 0.1 1 10 40 0.7739139
## 0.1 1 10 45 0.7759521
## 0.1 1 10 50 0.7797966
## 0.1 1 20 5 0.7652139
## 0.1 1 20 10 0.7662395
## 0.1 1 20 15 0.7659847
## 0.1 1 20 20 0.7659847
## 0.1 1 20 25 0.7664976
## 0.1 1 20 30 0.7687987
## 0.1 1 20 35 0.7690503
## 0.1 1 20 40 0.7675151
## 0.1 1 20 45 0.7693034
## 0.1 1 20 50 0.7744267
## 0.1 1 30 5 0.7659766
## 0.1 1 30 10 0.7670022
## 0.1 1 30 15 0.7672586
## 0.1 1 30 20 0.7672586
## 0.1 1 30 25 0.7652139
## 0.1 1 30 30 0.7664878
## 0.1 1 30 35 0.7685391
## 0.1 1 30 40 0.7698227
## 0.1 1 30 45 0.7718675
## 0.1 1 30 50 0.7741752
## 0.1 2 10 5 0.7762152
## 0.1 2 10 10 0.7652106
## 0.1 2 10 15 0.7723771
## 0.1 2 10 20 0.7813221
## 0.1 2 10 25 0.7882337
## 0.1 2 10 30 0.7913172
## 0.1 2 10 35 0.7936167
## 0.1 2 10 40 0.8002687
## 0.1 2 10 45 0.8000058
## 0.1 2 10 50 0.8000009
## 0.1 2 20 5 0.7762152
## 0.1 2 20 10 0.7682762
## 0.1 2 20 15 0.7636739
## 0.1 2 20 20 0.7644349
## 0.1 2 20 25 0.7741508
## 0.1 2 20 30 0.7782599
## 0.1 2 20 35 0.7815867
## 0.1 2 20 40 0.7841362
## 0.1 2 20 45 0.7900238
## 0.1 2 20 50 0.7923250
## 0.1 2 30 5 0.7762152
## 0.1 2 30 10 0.7700630
## 0.1 2 30 15 0.7667572
## 0.1 2 30 20 0.7667491
## 0.1 2 30 25 0.7733962
## 0.1 2 30 30 0.7787645
## 0.1 2 30 35 0.7795354
## 0.1 2 30 40 0.7800433
## 0.1 2 30 45 0.7823493
## 0.1 2 30 50 0.7849200
## 0.1 3 10 5 0.7769828
## 0.1 3 10 10 0.7779971
## 0.1 3 10 15 0.7833784
## 0.1 3 10 20 0.7897804
## 0.1 3 10 25 0.7977079
## 0.1 3 10 30 0.7977080
## 0.1 3 10 35 0.8010396
## 0.1 3 10 40 0.8018072
## 0.1 3 10 45 0.8015459
## 0.1 3 10 50 0.8041051
## 0.1 3 20 5 0.7762152
## 0.1 3 20 10 0.7736478
## 0.1 3 20 15 0.7792953
## 0.1 3 20 20 0.7820913
## 0.1 3 20 25 0.7882419
## 0.1 3 20 30 0.7879823
## 0.1 3 20 35 0.7949004
## 0.1 3 20 40 0.7933604
## 0.1 3 20 45 0.7933604
## 0.1 3 20 50 0.7956485
## 0.1 3 30 5 0.7762152
## 0.1 3 30 10 0.7744203
## 0.1 3 30 15 0.7739204
## 0.1 3 30 20 0.7813253
## 0.1 3 30 25 0.7797787
## 0.1 3 30 30 0.7800368
## 0.1 3 30 35 0.7869534
## 0.1 3 30 40 0.7856729
## 0.1 3 30 45 0.7851634
## 0.1 3 30 50 0.7838846
## 0.2 1 10 5 0.7621419
## 0.2 1 10 10 0.7670071
## 0.2 1 10 15 0.7698146
## 0.2 1 10 20 0.7716144
## 0.2 1 10 25 0.7826302
## 0.2 1 10 30 0.7856957
## 0.2 1 10 35 0.7874743
## 0.2 1 10 40 0.7902997
## 0.2 1 10 45 0.7884999
## 0.2 1 10 50 0.7890209
## 0.2 1 20 5 0.7680230
## 0.2 1 20 10 0.7659766
## 0.2 1 20 15 0.7646978
## 0.2 1 20 20 0.7716078
## 0.2 1 20 25 0.7723852
## 0.2 1 20 30 0.7764845
## 0.2 1 20 35 0.7808337
## 0.2 1 20 40 0.7867067
## 0.2 1 20 45 0.7861939
## 0.2 1 20 50 0.7867100
## 0.2 1 30 5 0.7649575
## 0.2 1 30 10 0.7664959
## 0.2 1 30 15 0.7682810
## 0.2 1 30 20 0.7708435
## 0.2 1 30 25 0.7762102
## 0.2 1 30 30 0.7754459
## 0.2 1 30 35 0.7759604
## 0.2 1 30 40 0.7756990
## 0.2 1 30 45 0.7792953
## 0.2 1 30 50 0.7772473
## 0.2 2 10 5 0.7695728
## 0.2 2 10 10 0.7803192
## 0.2 2 10 15 0.7946213
## 0.2 2 10 20 0.8007620
## 0.2 2 10 25 0.8012944
## 0.2 2 10 30 0.8005220
## 0.2 2 10 35 0.7977128
## 0.2 2 10 40 0.8002607
## 0.2 2 10 45 0.7984707
## 0.2 2 10 50 0.8005072
## 0.2 2 20 5 0.7687841
## 0.2 2 20 10 0.7716160
## 0.2 2 20 15 0.7823673
## 0.2 2 20 20 0.7882419
## 0.2 2 20 25 0.7920799
## 0.2 2 20 30 0.7920815
## 0.2 2 20 35 0.7951520
## 0.2 2 20 40 0.7969322
## 0.2 2 20 45 0.7948874
## 0.2 2 20 50 0.7954002
## 0.2 2 30 5 0.7692970
## 0.2 2 30 10 0.7721191
## 0.2 2 30 15 0.7698114
## 0.2 2 30 20 0.7782533
## 0.2 2 30 25 0.7833847
## 0.2 2 30 30 0.7841361
## 0.2 2 30 35 0.7818350
## 0.2 2 30 40 0.7836184
## 0.2 2 30 45 0.7892496
## 0.2 2 30 50 0.7925862
## 0.2 3 10 5 0.7754541
## 0.2 3 10 10 0.7887596
## 0.2 3 10 15 0.7961809
## 0.2 3 10 20 0.7948939
## 0.2 3 10 25 0.7989933
## 0.2 3 10 30 0.8020473
## 0.2 3 10 35 0.8002720
## 0.2 3 10 40 0.8020523
## 0.2 3 10 45 0.8025618
## 0.2 3 10 50 0.8038439
## 0.2 3 20 5 0.7690600
## 0.2 3 20 10 0.7831252
## 0.2 3 20 15 0.7869453
## 0.2 3 20 20 0.7933474
## 0.2 3 20 25 0.7915509
## 0.2 3 20 30 0.7943730
## 0.2 3 20 35 0.7920686
## 0.2 3 20 40 0.7959033
## 0.2 3 20 45 0.7935989
## 0.2 3 20 50 0.7974402
## 0.2 3 30 5 0.7705774
## 0.2 3 30 10 0.7736641
## 0.2 3 30 15 0.7805578
## 0.2 3 30 20 0.7856713
## 0.2 3 30 25 0.7912862
## 0.2 3 30 30 0.7900123
## 0.2 3 30 35 0.7902574
## 0.2 3 30 40 0.7941035
## 0.2 3 30 45 0.7961564
## 0.2 3 30 50 0.7969240
## 0.3 1 10 5 0.7675150
## 0.3 1 10 10 0.7700677
## 0.3 1 10 15 0.7736542
## 0.3 1 10 20 0.7744202
## 0.3 1 10 25 0.7823591
## 0.3 1 10 30 0.7823689
## 0.3 1 10 35 0.7844137
## 0.3 1 10 40 0.7897917
## 0.3 1 10 45 0.7810852
## 0.3 1 10 50 0.7818610
## 0.3 1 20 5 0.7657365
## 0.3 1 20 10 0.7669989
## 0.3 1 20 15 0.7736591
## 0.3 1 20 20 0.7757104
## 0.3 1 20 25 0.7780213
## 0.3 1 20 30 0.7823771
## 0.3 1 20 35 0.7831349
## 0.3 1 20 40 0.7849281
## 0.3 1 20 45 0.7877405
## 0.3 1 20 50 0.7913221
## 0.3 1 30 5 0.7667491
## 0.3 1 30 10 0.7693116
## 0.3 1 30 15 0.7736656
## 0.3 1 30 20 0.7728948
## 0.3 1 30 25 0.7754524
## 0.3 1 30 30 0.7821060
## 0.3 1 30 35 0.7813286
## 0.3 1 30 40 0.7805562
## 0.3 1 30 45 0.7795370
## 0.3 1 30 50 0.7785195
## 0.3 2 10 5 0.7736429
## 0.3 2 10 10 0.7895142
## 0.3 2 10 15 0.7971756
## 0.3 2 10 20 0.8030714
## 0.3 2 10 25 0.8012814
## 0.3 2 10 30 0.8015329
## 0.3 2 10 35 0.7966676
## 0.3 2 10 40 0.7987124
## 0.3 2 10 45 0.8007507
## 0.3 2 10 50 0.7994800
## 0.3 2 20 5 0.7667540
## 0.3 2 20 10 0.7805692
## 0.3 2 20 15 0.7892659
## 0.3 2 20 20 0.7930925
## 0.3 2 20 25 0.7994768
## 0.3 2 20 30 0.7966660
## 0.3 2 20 35 0.7958854
## 0.3 2 20 40 0.7941116
## 0.3 2 20 45 0.7984462
## 0.3 2 20 50 0.7969176
## 0.3 2 30 5 0.7639287
## 0.3 2 30 10 0.7693068
## 0.3 2 30 15 0.7802998
## 0.3 2 30 20 0.7805643
## 0.3 2 30 25 0.7846360
## 0.3 2 30 30 0.7846375
## 0.3 2 30 35 0.7856779
## 0.3 2 30 40 0.7941149
## 0.3 2 30 45 0.7918170
## 0.3 2 30 50 0.7933506
## 0.3 3 10 5 0.7803144
## 0.3 3 10 10 0.7956632
## 0.3 3 10 15 0.8023054
## 0.3 3 10 20 0.8015426
## 0.3 3 10 25 0.8015443
## 0.3 3 10 30 0.8028149
## 0.3 3 10 35 0.8025699
## 0.3 3 10 40 0.7999993
## 0.3 3 10 45 0.7943925
## 0.3 3 10 50 0.7997657
## 0.3 3 20 5 0.7774907
## 0.3 3 20 10 0.7925781
## 0.3 3 20 15 0.7918154
## 0.3 3 20 20 0.7977112
## 0.3 3 20 25 0.8000075
## 0.3 3 20 30 0.8017942
## 0.3 3 20 35 0.7974548
## 0.3 3 20 40 0.7969354
## 0.3 3 20 45 0.8043323
## 0.3 3 20 50 0.8061287
## 0.3 3 30 5 0.7734125
## 0.3 3 30 10 0.7821126
## 0.3 3 30 15 0.7915574
## 0.3 3 30 20 0.7846490
## 0.3 3 30 25 0.7897723
## 0.3 3 30 30 0.7895191
## 0.3 3 30 35 0.7923250
## 0.3 3 30 40 0.7948988
## 0.3 3 30 45 0.7931023
## 0.3 3 30 50 0.7943795
## Kappa Accuracy SD Kappa SD
## 0.5068990 0.02989148 0.06320075
## 0.5116754 0.03049068 0.06346941
## 0.5122267 0.03049068 0.06344830
## 0.5099611 0.02948566 0.06079232
## 0.5128053 0.03036620 0.06321439
## 0.5167000 0.03125230 0.06447723
## 0.5121230 0.03134333 0.06508044
## 0.5264767 0.02909830 0.05868774
## 0.5316101 0.03017819 0.06184828
## 0.5402722 0.02910022 0.05994546
## 0.5039342 0.02776113 0.05596537
## 0.5087632 0.02990394 0.06239219
## 0.5090396 0.02989148 0.06230063
## 0.5090396 0.02989148 0.06230063
## 0.5101743 0.03039883 0.06358601
## 0.5156691 0.03260019 0.06835429
## 0.5155470 0.02893755 0.05940530
## 0.5129674 0.02814021 0.05789075
## 0.5164180 0.02620867 0.05247451
## 0.5278479 0.02636268 0.05332780
## 0.5078179 0.03023477 0.06384033
## 0.5109710 0.02999089 0.06112310
## 0.5122267 0.03049068 0.06344830
## 0.5122267 0.03049068 0.06344830
## 0.5070237 0.02924455 0.05991361
## 0.5104608 0.03079029 0.06334470
## 0.5138546 0.03046889 0.06339141
## 0.5183164 0.02969520 0.06182831
## 0.5221668 0.03191171 0.06711851
## 0.5277807 0.03098311 0.06419363
## 0.5003228 0.01943130 0.04543021
## 0.4876053 0.02805329 0.05555898
## 0.5083940 0.03128839 0.06185535
## 0.5291644 0.03282362 0.06747275
## 0.5447690 0.03282961 0.06938786
## 0.5524133 0.02483552 0.05327124
## 0.5580691 0.02424531 0.05201645
## 0.5729243 0.02191954 0.04712153
## 0.5731354 0.02195673 0.04752952
## 0.5747671 0.02251967 0.04722950
## 0.5003228 0.01943130 0.04543021
## 0.4909829 0.02522061 0.05143022
## 0.4885601 0.02326584 0.04555953
## 0.4939438 0.03013584 0.06018546
## 0.5142870 0.02734693 0.05734199
## 0.5240661 0.02964560 0.06225099
## 0.5313803 0.03038308 0.06422843
## 0.5393379 0.02891215 0.06225570
## 0.5533187 0.02424764 0.05181633
## 0.5588020 0.02938789 0.06293289
## 0.5003228 0.01943130 0.04543021
## 0.4935595 0.02555045 0.05146750
## 0.4946169 0.03018342 0.06177469
## 0.4966534 0.02983784 0.06149704
## 0.5116707 0.02858429 0.05836913
## 0.5234695 0.02768109 0.05730050
## 0.5279118 0.03053069 0.06479479
## 0.5297229 0.03088374 0.06587760
## 0.5349552 0.03198181 0.06821559
## 0.5414414 0.02713213 0.05687380
## 0.5025879 0.01980548 0.04635923
## 0.5097134 0.02248619 0.05092485
## 0.5277655 0.02539061 0.05826005
## 0.5477875 0.02564416 0.05743698
## 0.5665746 0.02388304 0.05304513
## 0.5682688 0.02716449 0.06075609
## 0.5764466 0.02656016 0.05838217
## 0.5792110 0.02717506 0.05886332
## 0.5794138 0.02455545 0.05321545
## 0.5852808 0.02485300 0.05329289
## 0.5003228 0.01943130 0.04543021
## 0.4997695 0.02255565 0.05173043
## 0.5185328 0.02346828 0.05354928
## 0.5289593 0.02458976 0.05261948
## 0.5430550 0.02372909 0.05118290
## 0.5444704 0.02727598 0.05917280
## 0.5611766 0.02576930 0.05545747
## 0.5596960 0.02379788 0.05124839
## 0.5602221 0.02422032 0.05201502
## 0.5663037 0.02748152 0.05951490
## 0.5003228 0.01943130 0.04543021
## 0.4972711 0.01881778 0.04425143
## 0.5072300 0.02783253 0.05655506
## 0.5272299 0.02945697 0.06193121
## 0.5272033 0.02724902 0.05744290
## 0.5284531 0.03037383 0.06574787
## 0.5446295 0.02888876 0.06078796
## 0.5426307 0.03154473 0.06574049
## 0.5427859 0.02930526 0.06203585
## 0.5410170 0.03066654 0.06346243
## 0.4969020 0.02905763 0.06313107
## 0.5106325 0.03024207 0.06293365
## 0.5168493 0.03116431 0.06384049
## 0.5222493 0.03337429 0.06958435
## 0.5454766 0.03015808 0.06215687
## 0.5520518 0.02897665 0.05979709
## 0.5560712 0.02809719 0.05785234
## 0.5624375 0.02555621 0.05248440
## 0.5585148 0.02528132 0.05253832
## 0.5601679 0.02210160 0.04590014
## 0.5127959 0.03098242 0.06365961
## 0.5087470 0.02908016 0.06020368
## 0.5053047 0.03154475 0.06602654
## 0.5204837 0.03094083 0.06430366
## 0.5233665 0.03058057 0.06259138
## 0.5327456 0.02685929 0.05464271
## 0.5416816 0.02432870 0.04958532
## 0.5548815 0.02144464 0.04366260
## 0.5531995 0.02263922 0.04633041
## 0.5538997 0.02375048 0.04851069
## 0.5045487 0.02884121 0.05936438
## 0.5093319 0.03012906 0.06297748
## 0.5132175 0.02956596 0.06185566
## 0.5203602 0.03395988 0.07195817
## 0.5316613 0.02748804 0.05628281
## 0.5295998 0.02172159 0.04344300
## 0.5313039 0.02844610 0.05961419
## 0.5304756 0.02666457 0.05625595
## 0.5381902 0.02321282 0.04736218
## 0.5343281 0.02483476 0.05045475
## 0.4950654 0.02584843 0.05095010
## 0.5249029 0.02943062 0.05776693
## 0.5596901 0.02325698 0.04757517
## 0.5760927 0.02614172 0.05533125
## 0.5789276 0.02754474 0.05934038
## 0.5776058 0.02813928 0.06052737
## 0.5731894 0.02487932 0.05158207
## 0.5783795 0.02812301 0.06029140
## 0.5745024 0.02609011 0.05635277
## 0.5789149 0.02455203 0.05181259
## 0.4921559 0.02713570 0.05104730
## 0.5054420 0.03044131 0.06086967
## 0.5317215 0.03013853 0.06154400
## 0.5471720 0.03156888 0.06771020
## 0.5587298 0.02930018 0.06162407
## 0.5586091 0.03139512 0.06613766
## 0.5664189 0.03054368 0.06530382
## 0.5714003 0.02600282 0.05390575
## 0.5661670 0.02418800 0.05110142
## 0.5689763 0.02760907 0.05800245
## 0.4970240 0.02383458 0.04804574
## 0.5065992 0.02652028 0.05409472
## 0.5056798 0.02643911 0.05383536
## 0.5262018 0.02581677 0.05358491
## 0.5393275 0.02626151 0.05419982
## 0.5419764 0.02693321 0.05582672
## 0.5383184 0.02563819 0.05284795
## 0.5429392 0.02824805 0.05835249
## 0.5548746 0.02950318 0.06118314
## 0.5623431 0.02559582 0.05337077
## 0.5066489 0.02106890 0.04680100
## 0.5455886 0.02430075 0.05307135
## 0.5638722 0.02435777 0.05385935
## 0.5635308 0.02491793 0.05411678
## 0.5748122 0.02782335 0.05975369
## 0.5825965 0.02557957 0.05436915
## 0.5791112 0.02454043 0.05319673
## 0.5832853 0.02413513 0.05164381
## 0.5846561 0.02703984 0.05727037
## 0.5871523 0.02751466 0.05762595
## 0.4910376 0.02483772 0.05246725
## 0.5317716 0.03188216 0.06673153
## 0.5444316 0.02748041 0.05758297
## 0.5607694 0.02621358 0.05720106
## 0.5585958 0.02332011 0.05095746
## 0.5655117 0.02412584 0.05008549
## 0.5609522 0.02602048 0.05473449
## 0.5692131 0.02638864 0.05591078
## 0.5647448 0.02825116 0.05980830
## 0.5730702 0.02697304 0.05724702
## 0.4905673 0.02551313 0.05556197
## 0.5099487 0.02642500 0.05573226
## 0.5298149 0.02655870 0.05549651
## 0.5442780 0.02265255 0.04689895
## 0.5564607 0.02433550 0.05103039
## 0.5549483 0.02707553 0.05652122
## 0.5556731 0.02853002 0.05880285
## 0.5641275 0.02534519 0.05256388
## 0.5690210 0.02856476 0.05971621
## 0.5713791 0.02817817 0.05873615
## 0.5113058 0.02991173 0.06577079
## 0.5161516 0.02975794 0.06212736
## 0.5269481 0.03001907 0.06092161
## 0.5288953 0.03094444 0.06308286
## 0.5455104 0.02486484 0.05045012
## 0.5461805 0.02793851 0.05701533
## 0.5503732 0.02707467 0.05620419
## 0.5615423 0.02693005 0.05658118
## 0.5441289 0.02880323 0.05894892
## 0.5455595 0.02783118 0.05841053
## 0.5062034 0.02973347 0.06351429
## 0.5121327 0.03649630 0.07636107
## 0.5256620 0.03469460 0.06969554
## 0.5319991 0.03271957 0.06703400
## 0.5361551 0.02515516 0.05087471
## 0.5450275 0.02727182 0.05862499
## 0.5469076 0.02362591 0.04850026
## 0.5502612 0.02254087 0.04526951
## 0.5567018 0.02411068 0.04955774
## 0.5642937 0.02431196 0.04961548
## 0.5094269 0.03108580 0.06472619
## 0.5153240 0.02816689 0.05575237
## 0.5263178 0.02699728 0.05483013
## 0.5249356 0.02908962 0.05930803
## 0.5300506 0.02158561 0.04509479
## 0.5452791 0.02465576 0.04990347
## 0.5435970 0.02165604 0.04440491
## 0.5420138 0.02402749 0.04941061
## 0.5386598 0.02284500 0.04615461
## 0.5372723 0.02400847 0.04941306
## 0.5127404 0.02427860 0.05337630
## 0.5487454 0.02708556 0.05414822
## 0.5688673 0.02953122 0.06202047
## 0.5828046 0.02271731 0.04856831
## 0.5792530 0.02406788 0.05166909
## 0.5799759 0.02757551 0.05880047
## 0.5710994 0.02739864 0.05858402
## 0.5752218 0.02801623 0.05887613
## 0.5797910 0.02736330 0.05786494
## 0.5784495 0.02738766 0.05714912
## 0.4980699 0.03015008 0.06043332
## 0.5300725 0.03233429 0.06820843
## 0.5526998 0.03001324 0.06255075
## 0.5620874 0.02576839 0.05248620
## 0.5756971 0.02585228 0.05459968
## 0.5701116 0.02972990 0.06305907
## 0.5690775 0.02989518 0.06404186
## 0.5660351 0.02730382 0.05734733
## 0.5749226 0.02660593 0.05676162
## 0.5712990 0.02367713 0.05067415
## 0.4874785 0.02613835 0.05460939
## 0.5068663 0.03292139 0.07033958
## 0.5329821 0.03167851 0.06582253
## 0.5361748 0.03143679 0.06528534
## 0.5448641 0.02817413 0.05978375
## 0.5443156 0.02691208 0.05659360
## 0.5474512 0.02827260 0.05958969
## 0.5649149 0.02536301 0.05359065
## 0.5602842 0.02554563 0.05421837
## 0.5640671 0.02426604 0.05123815
## 0.5266237 0.02638660 0.05811096
## 0.5652257 0.02428825 0.05160268
## 0.5830309 0.02098591 0.04438336
## 0.5815659 0.02868528 0.05904744
## 0.5825131 0.02949765 0.06168518
## 0.5856772 0.02850845 0.05899529
## 0.5848333 0.02366155 0.04900259
## 0.5793287 0.02372010 0.04804921
## 0.5682937 0.02716170 0.05544241
## 0.5798493 0.02645018 0.05472741
## 0.5199651 0.02581712 0.05476589
## 0.5581072 0.02730160 0.05883057
## 0.5574222 0.02729231 0.05923644
## 0.5706626 0.02468845 0.05323286
## 0.5773610 0.02761652 0.05903879
## 0.5817431 0.02674297 0.05708867
## 0.5729509 0.02880149 0.06086004
## 0.5723672 0.02656655 0.05625026
## 0.5878010 0.02499278 0.05241269
## 0.5912440 0.02472564 0.05325086
## 0.5059979 0.02431065 0.05129652
## 0.5312242 0.02560560 0.05338802
## 0.5557316 0.02649882 0.05721655
## 0.5438182 0.02877293 0.06077471
## 0.5558637 0.02568464 0.05398612
## 0.5550227 0.02543186 0.05520704
## 0.5606423 0.02714567 0.05887034
## 0.5666388 0.02716689 0.05676868
## 0.5630442 0.02595302 0.05452447
## 0.5658454 0.03034492 0.06401267
##
## Accuracy was used to select the optimal model using the largest value.
## The final values used for the model were n.trees = 50, interaction.depth
## = 3, shrinkage = 0.3 and n.minobsinnode = 20.
trellis.par.set(caretTheme())
plot(gbmFit)
ggplot(gbmFit)
##Accuracy 0.8061287 (Stand.Dev. 0.02472564)
logisticReg #Accuracy 0.7813753 (Stand.Dev. 0.03270276)
## Generalized Linear Model
##
## 782 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 626, 624, 626, 626, 626, 625, ...
## Resampling results
##
## Accuracy Kappa Accuracy SD Kappa SD
## 0.7813753 0.5437916 0.03270276 0.06892272
##
##
BayesianLogReg #Accuracy 0.7821429 (Stand.Dev. 0.03264758)
## Bayesian Generalized Linear Model
##
## 782 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 626, 624, 626, 626, 626, 625, ...
## Resampling results
##
## Accuracy Kappa Accuracy SD Kappa SD
## 0.7821429 0.5453747 0.03264758 0.06862032
##
##
rpartTune1 #Accuracy 0.7713981 (Stand.Dev. 0.03267479)
## CART
##
## 782 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 626, 624, 626, 626, 626, 625, ...
## Resampling results across tuning parameters:
##
## cp Accuracy Kappa Accuracy SD Kappa SD
## 0.00000000 0.7713981 0.5195163 0.03267479 0.06791937
## 0.04811353 0.7634656 0.5013339 0.02769522 0.06197579
## 0.09622707 0.7673117 0.5120282 0.03082519 0.06651355
## 0.14434060 0.7673117 0.5120282 0.03082519 0.06651355
## 0.19245414 0.7673117 0.5120282 0.03082519 0.06651355
## 0.24056767 0.7673117 0.5120282 0.03082519 0.06651355
## 0.28868120 0.7673117 0.5120282 0.03082519 0.06651355
## 0.33679474 0.7673117 0.5120282 0.03082519 0.06651355
## 0.38490827 0.7673117 0.5120282 0.03082519 0.06651355
## 0.43302181 0.6697641 0.2403141 0.07970959 0.23704117
##
## Accuracy was used to select the optimal model using the largest value.
## The final value used for the model was cp = 0.
rpartTune2 #Accuracy 0.7770015 (Stand.Dev. 0.02521525)
## CART
##
## 782 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 626, 626, 625, 626, 625, 626, ...
## Resampling results across tuning parameters:
##
## maxdepth Accuracy Kappa Accuracy SD Kappa SD
## 1 0.7672775 0.5118670 0.02998430 0.06530389
## 3 0.7770015 0.5238072 0.02521525 0.05919326
## 4 0.7749600 0.5158499 0.02649617 0.05981048
## 7 0.7767353 0.5233229 0.02513619 0.05227189
## 12 0.7757064 0.5220013 0.02583147 0.05357595
## 16 0.7757064 0.5220013 0.02583147 0.05357595
## 20 0.7757064 0.5220013 0.02583147 0.05357595
## 21 0.7757064 0.5220013 0.02583147 0.05357595
##
## Accuracy was used to select the optimal model using the largest value.
## The final value used for the model was maxdepth = 3.
randomForestFit #Accuracy 0.7967534 (Stand.Dev. 0.03508362)
## Random Forest
##
## 782 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 626, 624, 626, 626, 626, 625, ...
## Resampling results across tuning parameters:
##
## mtry Accuracy Kappa Accuracy SD Kappa SD
## 1 0.7762926 0.5168517 0.03526116 0.07769250
## 2 0.7931474 0.5612414 0.03202731 0.06974171
## 3 0.7967534 0.5713995 0.03508362 0.07558851
## 4 0.7885857 0.5560803 0.03391657 0.07259389
## 5 0.7793679 0.5377080 0.03684067 0.07889677
##
## Accuracy was used to select the optimal model using the largest value.
## The final value used for the model was mtry = 3.
gbmFit #Accuracy 0.8061287 (Stand.Dev. 0.02472564)
## Stochastic Gradient Boosting
##
## 782 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 626, 624, 626, 626, 626, 626, ...
## Resampling results across tuning parameters:
##
## shrinkage interaction.depth n.minobsinnode n.trees Accuracy
## 0.1 1 10 5 0.7659847
## 0.1 1 10 10 0.7672586
## 0.1 1 10 15 0.7672586
## 0.1 1 10 20 0.7664894
## 0.1 1 10 25 0.7675150
## 0.1 1 10 30 0.7695582
## 0.1 1 10 35 0.7675232
## 0.1 1 10 40 0.7739139
## 0.1 1 10 45 0.7759521
## 0.1 1 10 50 0.7797966
## 0.1 1 20 5 0.7652139
## 0.1 1 20 10 0.7662395
## 0.1 1 20 15 0.7659847
## 0.1 1 20 20 0.7659847
## 0.1 1 20 25 0.7664976
## 0.1 1 20 30 0.7687987
## 0.1 1 20 35 0.7690503
## 0.1 1 20 40 0.7675151
## 0.1 1 20 45 0.7693034
## 0.1 1 20 50 0.7744267
## 0.1 1 30 5 0.7659766
## 0.1 1 30 10 0.7670022
## 0.1 1 30 15 0.7672586
## 0.1 1 30 20 0.7672586
## 0.1 1 30 25 0.7652139
## 0.1 1 30 30 0.7664878
## 0.1 1 30 35 0.7685391
## 0.1 1 30 40 0.7698227
## 0.1 1 30 45 0.7718675
## 0.1 1 30 50 0.7741752
## 0.1 2 10 5 0.7762152
## 0.1 2 10 10 0.7652106
## 0.1 2 10 15 0.7723771
## 0.1 2 10 20 0.7813221
## 0.1 2 10 25 0.7882337
## 0.1 2 10 30 0.7913172
## 0.1 2 10 35 0.7936167
## 0.1 2 10 40 0.8002687
## 0.1 2 10 45 0.8000058
## 0.1 2 10 50 0.8000009
## 0.1 2 20 5 0.7762152
## 0.1 2 20 10 0.7682762
## 0.1 2 20 15 0.7636739
## 0.1 2 20 20 0.7644349
## 0.1 2 20 25 0.7741508
## 0.1 2 20 30 0.7782599
## 0.1 2 20 35 0.7815867
## 0.1 2 20 40 0.7841362
## 0.1 2 20 45 0.7900238
## 0.1 2 20 50 0.7923250
## 0.1 2 30 5 0.7762152
## 0.1 2 30 10 0.7700630
## 0.1 2 30 15 0.7667572
## 0.1 2 30 20 0.7667491
## 0.1 2 30 25 0.7733962
## 0.1 2 30 30 0.7787645
## 0.1 2 30 35 0.7795354
## 0.1 2 30 40 0.7800433
## 0.1 2 30 45 0.7823493
## 0.1 2 30 50 0.7849200
## 0.1 3 10 5 0.7769828
## 0.1 3 10 10 0.7779971
## 0.1 3 10 15 0.7833784
## 0.1 3 10 20 0.7897804
## 0.1 3 10 25 0.7977079
## 0.1 3 10 30 0.7977080
## 0.1 3 10 35 0.8010396
## 0.1 3 10 40 0.8018072
## 0.1 3 10 45 0.8015459
## 0.1 3 10 50 0.8041051
## 0.1 3 20 5 0.7762152
## 0.1 3 20 10 0.7736478
## 0.1 3 20 15 0.7792953
## 0.1 3 20 20 0.7820913
## 0.1 3 20 25 0.7882419
## 0.1 3 20 30 0.7879823
## 0.1 3 20 35 0.7949004
## 0.1 3 20 40 0.7933604
## 0.1 3 20 45 0.7933604
## 0.1 3 20 50 0.7956485
## 0.1 3 30 5 0.7762152
## 0.1 3 30 10 0.7744203
## 0.1 3 30 15 0.7739204
## 0.1 3 30 20 0.7813253
## 0.1 3 30 25 0.7797787
## 0.1 3 30 30 0.7800368
## 0.1 3 30 35 0.7869534
## 0.1 3 30 40 0.7856729
## 0.1 3 30 45 0.7851634
## 0.1 3 30 50 0.7838846
## 0.2 1 10 5 0.7621419
## 0.2 1 10 10 0.7670071
## 0.2 1 10 15 0.7698146
## 0.2 1 10 20 0.7716144
## 0.2 1 10 25 0.7826302
## 0.2 1 10 30 0.7856957
## 0.2 1 10 35 0.7874743
## 0.2 1 10 40 0.7902997
## 0.2 1 10 45 0.7884999
## 0.2 1 10 50 0.7890209
## 0.2 1 20 5 0.7680230
## 0.2 1 20 10 0.7659766
## 0.2 1 20 15 0.7646978
## 0.2 1 20 20 0.7716078
## 0.2 1 20 25 0.7723852
## 0.2 1 20 30 0.7764845
## 0.2 1 20 35 0.7808337
## 0.2 1 20 40 0.7867067
## 0.2 1 20 45 0.7861939
## 0.2 1 20 50 0.7867100
## 0.2 1 30 5 0.7649575
## 0.2 1 30 10 0.7664959
## 0.2 1 30 15 0.7682810
## 0.2 1 30 20 0.7708435
## 0.2 1 30 25 0.7762102
## 0.2 1 30 30 0.7754459
## 0.2 1 30 35 0.7759604
## 0.2 1 30 40 0.7756990
## 0.2 1 30 45 0.7792953
## 0.2 1 30 50 0.7772473
## 0.2 2 10 5 0.7695728
## 0.2 2 10 10 0.7803192
## 0.2 2 10 15 0.7946213
## 0.2 2 10 20 0.8007620
## 0.2 2 10 25 0.8012944
## 0.2 2 10 30 0.8005220
## 0.2 2 10 35 0.7977128
## 0.2 2 10 40 0.8002607
## 0.2 2 10 45 0.7984707
## 0.2 2 10 50 0.8005072
## 0.2 2 20 5 0.7687841
## 0.2 2 20 10 0.7716160
## 0.2 2 20 15 0.7823673
## 0.2 2 20 20 0.7882419
## 0.2 2 20 25 0.7920799
## 0.2 2 20 30 0.7920815
## 0.2 2 20 35 0.7951520
## 0.2 2 20 40 0.7969322
## 0.2 2 20 45 0.7948874
## 0.2 2 20 50 0.7954002
## 0.2 2 30 5 0.7692970
## 0.2 2 30 10 0.7721191
## 0.2 2 30 15 0.7698114
## 0.2 2 30 20 0.7782533
## 0.2 2 30 25 0.7833847
## 0.2 2 30 30 0.7841361
## 0.2 2 30 35 0.7818350
## 0.2 2 30 40 0.7836184
## 0.2 2 30 45 0.7892496
## 0.2 2 30 50 0.7925862
## 0.2 3 10 5 0.7754541
## 0.2 3 10 10 0.7887596
## 0.2 3 10 15 0.7961809
## 0.2 3 10 20 0.7948939
## 0.2 3 10 25 0.7989933
## 0.2 3 10 30 0.8020473
## 0.2 3 10 35 0.8002720
## 0.2 3 10 40 0.8020523
## 0.2 3 10 45 0.8025618
## 0.2 3 10 50 0.8038439
## 0.2 3 20 5 0.7690600
## 0.2 3 20 10 0.7831252
## 0.2 3 20 15 0.7869453
## 0.2 3 20 20 0.7933474
## 0.2 3 20 25 0.7915509
## 0.2 3 20 30 0.7943730
## 0.2 3 20 35 0.7920686
## 0.2 3 20 40 0.7959033
## 0.2 3 20 45 0.7935989
## 0.2 3 20 50 0.7974402
## 0.2 3 30 5 0.7705774
## 0.2 3 30 10 0.7736641
## 0.2 3 30 15 0.7805578
## 0.2 3 30 20 0.7856713
## 0.2 3 30 25 0.7912862
## 0.2 3 30 30 0.7900123
## 0.2 3 30 35 0.7902574
## 0.2 3 30 40 0.7941035
## 0.2 3 30 45 0.7961564
## 0.2 3 30 50 0.7969240
## 0.3 1 10 5 0.7675150
## 0.3 1 10 10 0.7700677
## 0.3 1 10 15 0.7736542
## 0.3 1 10 20 0.7744202
## 0.3 1 10 25 0.7823591
## 0.3 1 10 30 0.7823689
## 0.3 1 10 35 0.7844137
## 0.3 1 10 40 0.7897917
## 0.3 1 10 45 0.7810852
## 0.3 1 10 50 0.7818610
## 0.3 1 20 5 0.7657365
## 0.3 1 20 10 0.7669989
## 0.3 1 20 15 0.7736591
## 0.3 1 20 20 0.7757104
## 0.3 1 20 25 0.7780213
## 0.3 1 20 30 0.7823771
## 0.3 1 20 35 0.7831349
## 0.3 1 20 40 0.7849281
## 0.3 1 20 45 0.7877405
## 0.3 1 20 50 0.7913221
## 0.3 1 30 5 0.7667491
## 0.3 1 30 10 0.7693116
## 0.3 1 30 15 0.7736656
## 0.3 1 30 20 0.7728948
## 0.3 1 30 25 0.7754524
## 0.3 1 30 30 0.7821060
## 0.3 1 30 35 0.7813286
## 0.3 1 30 40 0.7805562
## 0.3 1 30 45 0.7795370
## 0.3 1 30 50 0.7785195
## 0.3 2 10 5 0.7736429
## 0.3 2 10 10 0.7895142
## 0.3 2 10 15 0.7971756
## 0.3 2 10 20 0.8030714
## 0.3 2 10 25 0.8012814
## 0.3 2 10 30 0.8015329
## 0.3 2 10 35 0.7966676
## 0.3 2 10 40 0.7987124
## 0.3 2 10 45 0.8007507
## 0.3 2 10 50 0.7994800
## 0.3 2 20 5 0.7667540
## 0.3 2 20 10 0.7805692
## 0.3 2 20 15 0.7892659
## 0.3 2 20 20 0.7930925
## 0.3 2 20 25 0.7994768
## 0.3 2 20 30 0.7966660
## 0.3 2 20 35 0.7958854
## 0.3 2 20 40 0.7941116
## 0.3 2 20 45 0.7984462
## 0.3 2 20 50 0.7969176
## 0.3 2 30 5 0.7639287
## 0.3 2 30 10 0.7693068
## 0.3 2 30 15 0.7802998
## 0.3 2 30 20 0.7805643
## 0.3 2 30 25 0.7846360
## 0.3 2 30 30 0.7846375
## 0.3 2 30 35 0.7856779
## 0.3 2 30 40 0.7941149
## 0.3 2 30 45 0.7918170
## 0.3 2 30 50 0.7933506
## 0.3 3 10 5 0.7803144
## 0.3 3 10 10 0.7956632
## 0.3 3 10 15 0.8023054
## 0.3 3 10 20 0.8015426
## 0.3 3 10 25 0.8015443
## 0.3 3 10 30 0.8028149
## 0.3 3 10 35 0.8025699
## 0.3 3 10 40 0.7999993
## 0.3 3 10 45 0.7943925
## 0.3 3 10 50 0.7997657
## 0.3 3 20 5 0.7774907
## 0.3 3 20 10 0.7925781
## 0.3 3 20 15 0.7918154
## 0.3 3 20 20 0.7977112
## 0.3 3 20 25 0.8000075
## 0.3 3 20 30 0.8017942
## 0.3 3 20 35 0.7974548
## 0.3 3 20 40 0.7969354
## 0.3 3 20 45 0.8043323
## 0.3 3 20 50 0.8061287
## 0.3 3 30 5 0.7734125
## 0.3 3 30 10 0.7821126
## 0.3 3 30 15 0.7915574
## 0.3 3 30 20 0.7846490
## 0.3 3 30 25 0.7897723
## 0.3 3 30 30 0.7895191
## 0.3 3 30 35 0.7923250
## 0.3 3 30 40 0.7948988
## 0.3 3 30 45 0.7931023
## 0.3 3 30 50 0.7943795
## Kappa Accuracy SD Kappa SD
## 0.5068990 0.02989148 0.06320075
## 0.5116754 0.03049068 0.06346941
## 0.5122267 0.03049068 0.06344830
## 0.5099611 0.02948566 0.06079232
## 0.5128053 0.03036620 0.06321439
## 0.5167000 0.03125230 0.06447723
## 0.5121230 0.03134333 0.06508044
## 0.5264767 0.02909830 0.05868774
## 0.5316101 0.03017819 0.06184828
## 0.5402722 0.02910022 0.05994546
## 0.5039342 0.02776113 0.05596537
## 0.5087632 0.02990394 0.06239219
## 0.5090396 0.02989148 0.06230063
## 0.5090396 0.02989148 0.06230063
## 0.5101743 0.03039883 0.06358601
## 0.5156691 0.03260019 0.06835429
## 0.5155470 0.02893755 0.05940530
## 0.5129674 0.02814021 0.05789075
## 0.5164180 0.02620867 0.05247451
## 0.5278479 0.02636268 0.05332780
## 0.5078179 0.03023477 0.06384033
## 0.5109710 0.02999089 0.06112310
## 0.5122267 0.03049068 0.06344830
## 0.5122267 0.03049068 0.06344830
## 0.5070237 0.02924455 0.05991361
## 0.5104608 0.03079029 0.06334470
## 0.5138546 0.03046889 0.06339141
## 0.5183164 0.02969520 0.06182831
## 0.5221668 0.03191171 0.06711851
## 0.5277807 0.03098311 0.06419363
## 0.5003228 0.01943130 0.04543021
## 0.4876053 0.02805329 0.05555898
## 0.5083940 0.03128839 0.06185535
## 0.5291644 0.03282362 0.06747275
## 0.5447690 0.03282961 0.06938786
## 0.5524133 0.02483552 0.05327124
## 0.5580691 0.02424531 0.05201645
## 0.5729243 0.02191954 0.04712153
## 0.5731354 0.02195673 0.04752952
## 0.5747671 0.02251967 0.04722950
## 0.5003228 0.01943130 0.04543021
## 0.4909829 0.02522061 0.05143022
## 0.4885601 0.02326584 0.04555953
## 0.4939438 0.03013584 0.06018546
## 0.5142870 0.02734693 0.05734199
## 0.5240661 0.02964560 0.06225099
## 0.5313803 0.03038308 0.06422843
## 0.5393379 0.02891215 0.06225570
## 0.5533187 0.02424764 0.05181633
## 0.5588020 0.02938789 0.06293289
## 0.5003228 0.01943130 0.04543021
## 0.4935595 0.02555045 0.05146750
## 0.4946169 0.03018342 0.06177469
## 0.4966534 0.02983784 0.06149704
## 0.5116707 0.02858429 0.05836913
## 0.5234695 0.02768109 0.05730050
## 0.5279118 0.03053069 0.06479479
## 0.5297229 0.03088374 0.06587760
## 0.5349552 0.03198181 0.06821559
## 0.5414414 0.02713213 0.05687380
## 0.5025879 0.01980548 0.04635923
## 0.5097134 0.02248619 0.05092485
## 0.5277655 0.02539061 0.05826005
## 0.5477875 0.02564416 0.05743698
## 0.5665746 0.02388304 0.05304513
## 0.5682688 0.02716449 0.06075609
## 0.5764466 0.02656016 0.05838217
## 0.5792110 0.02717506 0.05886332
## 0.5794138 0.02455545 0.05321545
## 0.5852808 0.02485300 0.05329289
## 0.5003228 0.01943130 0.04543021
## 0.4997695 0.02255565 0.05173043
## 0.5185328 0.02346828 0.05354928
## 0.5289593 0.02458976 0.05261948
## 0.5430550 0.02372909 0.05118290
## 0.5444704 0.02727598 0.05917280
## 0.5611766 0.02576930 0.05545747
## 0.5596960 0.02379788 0.05124839
## 0.5602221 0.02422032 0.05201502
## 0.5663037 0.02748152 0.05951490
## 0.5003228 0.01943130 0.04543021
## 0.4972711 0.01881778 0.04425143
## 0.5072300 0.02783253 0.05655506
## 0.5272299 0.02945697 0.06193121
## 0.5272033 0.02724902 0.05744290
## 0.5284531 0.03037383 0.06574787
## 0.5446295 0.02888876 0.06078796
## 0.5426307 0.03154473 0.06574049
## 0.5427859 0.02930526 0.06203585
## 0.5410170 0.03066654 0.06346243
## 0.4969020 0.02905763 0.06313107
## 0.5106325 0.03024207 0.06293365
## 0.5168493 0.03116431 0.06384049
## 0.5222493 0.03337429 0.06958435
## 0.5454766 0.03015808 0.06215687
## 0.5520518 0.02897665 0.05979709
## 0.5560712 0.02809719 0.05785234
## 0.5624375 0.02555621 0.05248440
## 0.5585148 0.02528132 0.05253832
## 0.5601679 0.02210160 0.04590014
## 0.5127959 0.03098242 0.06365961
## 0.5087470 0.02908016 0.06020368
## 0.5053047 0.03154475 0.06602654
## 0.5204837 0.03094083 0.06430366
## 0.5233665 0.03058057 0.06259138
## 0.5327456 0.02685929 0.05464271
## 0.5416816 0.02432870 0.04958532
## 0.5548815 0.02144464 0.04366260
## 0.5531995 0.02263922 0.04633041
## 0.5538997 0.02375048 0.04851069
## 0.5045487 0.02884121 0.05936438
## 0.5093319 0.03012906 0.06297748
## 0.5132175 0.02956596 0.06185566
## 0.5203602 0.03395988 0.07195817
## 0.5316613 0.02748804 0.05628281
## 0.5295998 0.02172159 0.04344300
## 0.5313039 0.02844610 0.05961419
## 0.5304756 0.02666457 0.05625595
## 0.5381902 0.02321282 0.04736218
## 0.5343281 0.02483476 0.05045475
## 0.4950654 0.02584843 0.05095010
## 0.5249029 0.02943062 0.05776693
## 0.5596901 0.02325698 0.04757517
## 0.5760927 0.02614172 0.05533125
## 0.5789276 0.02754474 0.05934038
## 0.5776058 0.02813928 0.06052737
## 0.5731894 0.02487932 0.05158207
## 0.5783795 0.02812301 0.06029140
## 0.5745024 0.02609011 0.05635277
## 0.5789149 0.02455203 0.05181259
## 0.4921559 0.02713570 0.05104730
## 0.5054420 0.03044131 0.06086967
## 0.5317215 0.03013853 0.06154400
## 0.5471720 0.03156888 0.06771020
## 0.5587298 0.02930018 0.06162407
## 0.5586091 0.03139512 0.06613766
## 0.5664189 0.03054368 0.06530382
## 0.5714003 0.02600282 0.05390575
## 0.5661670 0.02418800 0.05110142
## 0.5689763 0.02760907 0.05800245
## 0.4970240 0.02383458 0.04804574
## 0.5065992 0.02652028 0.05409472
## 0.5056798 0.02643911 0.05383536
## 0.5262018 0.02581677 0.05358491
## 0.5393275 0.02626151 0.05419982
## 0.5419764 0.02693321 0.05582672
## 0.5383184 0.02563819 0.05284795
## 0.5429392 0.02824805 0.05835249
## 0.5548746 0.02950318 0.06118314
## 0.5623431 0.02559582 0.05337077
## 0.5066489 0.02106890 0.04680100
## 0.5455886 0.02430075 0.05307135
## 0.5638722 0.02435777 0.05385935
## 0.5635308 0.02491793 0.05411678
## 0.5748122 0.02782335 0.05975369
## 0.5825965 0.02557957 0.05436915
## 0.5791112 0.02454043 0.05319673
## 0.5832853 0.02413513 0.05164381
## 0.5846561 0.02703984 0.05727037
## 0.5871523 0.02751466 0.05762595
## 0.4910376 0.02483772 0.05246725
## 0.5317716 0.03188216 0.06673153
## 0.5444316 0.02748041 0.05758297
## 0.5607694 0.02621358 0.05720106
## 0.5585958 0.02332011 0.05095746
## 0.5655117 0.02412584 0.05008549
## 0.5609522 0.02602048 0.05473449
## 0.5692131 0.02638864 0.05591078
## 0.5647448 0.02825116 0.05980830
## 0.5730702 0.02697304 0.05724702
## 0.4905673 0.02551313 0.05556197
## 0.5099487 0.02642500 0.05573226
## 0.5298149 0.02655870 0.05549651
## 0.5442780 0.02265255 0.04689895
## 0.5564607 0.02433550 0.05103039
## 0.5549483 0.02707553 0.05652122
## 0.5556731 0.02853002 0.05880285
## 0.5641275 0.02534519 0.05256388
## 0.5690210 0.02856476 0.05971621
## 0.5713791 0.02817817 0.05873615
## 0.5113058 0.02991173 0.06577079
## 0.5161516 0.02975794 0.06212736
## 0.5269481 0.03001907 0.06092161
## 0.5288953 0.03094444 0.06308286
## 0.5455104 0.02486484 0.05045012
## 0.5461805 0.02793851 0.05701533
## 0.5503732 0.02707467 0.05620419
## 0.5615423 0.02693005 0.05658118
## 0.5441289 0.02880323 0.05894892
## 0.5455595 0.02783118 0.05841053
## 0.5062034 0.02973347 0.06351429
## 0.5121327 0.03649630 0.07636107
## 0.5256620 0.03469460 0.06969554
## 0.5319991 0.03271957 0.06703400
## 0.5361551 0.02515516 0.05087471
## 0.5450275 0.02727182 0.05862499
## 0.5469076 0.02362591 0.04850026
## 0.5502612 0.02254087 0.04526951
## 0.5567018 0.02411068 0.04955774
## 0.5642937 0.02431196 0.04961548
## 0.5094269 0.03108580 0.06472619
## 0.5153240 0.02816689 0.05575237
## 0.5263178 0.02699728 0.05483013
## 0.5249356 0.02908962 0.05930803
## 0.5300506 0.02158561 0.04509479
## 0.5452791 0.02465576 0.04990347
## 0.5435970 0.02165604 0.04440491
## 0.5420138 0.02402749 0.04941061
## 0.5386598 0.02284500 0.04615461
## 0.5372723 0.02400847 0.04941306
## 0.5127404 0.02427860 0.05337630
## 0.5487454 0.02708556 0.05414822
## 0.5688673 0.02953122 0.06202047
## 0.5828046 0.02271731 0.04856831
## 0.5792530 0.02406788 0.05166909
## 0.5799759 0.02757551 0.05880047
## 0.5710994 0.02739864 0.05858402
## 0.5752218 0.02801623 0.05887613
## 0.5797910 0.02736330 0.05786494
## 0.5784495 0.02738766 0.05714912
## 0.4980699 0.03015008 0.06043332
## 0.5300725 0.03233429 0.06820843
## 0.5526998 0.03001324 0.06255075
## 0.5620874 0.02576839 0.05248620
## 0.5756971 0.02585228 0.05459968
## 0.5701116 0.02972990 0.06305907
## 0.5690775 0.02989518 0.06404186
## 0.5660351 0.02730382 0.05734733
## 0.5749226 0.02660593 0.05676162
## 0.5712990 0.02367713 0.05067415
## 0.4874785 0.02613835 0.05460939
## 0.5068663 0.03292139 0.07033958
## 0.5329821 0.03167851 0.06582253
## 0.5361748 0.03143679 0.06528534
## 0.5448641 0.02817413 0.05978375
## 0.5443156 0.02691208 0.05659360
## 0.5474512 0.02827260 0.05958969
## 0.5649149 0.02536301 0.05359065
## 0.5602842 0.02554563 0.05421837
## 0.5640671 0.02426604 0.05123815
## 0.5266237 0.02638660 0.05811096
## 0.5652257 0.02428825 0.05160268
## 0.5830309 0.02098591 0.04438336
## 0.5815659 0.02868528 0.05904744
## 0.5825131 0.02949765 0.06168518
## 0.5856772 0.02850845 0.05899529
## 0.5848333 0.02366155 0.04900259
## 0.5793287 0.02372010 0.04804921
## 0.5682937 0.02716170 0.05544241
## 0.5798493 0.02645018 0.05472741
## 0.5199651 0.02581712 0.05476589
## 0.5581072 0.02730160 0.05883057
## 0.5574222 0.02729231 0.05923644
## 0.5706626 0.02468845 0.05323286
## 0.5773610 0.02761652 0.05903879
## 0.5817431 0.02674297 0.05708867
## 0.5729509 0.02880149 0.06086004
## 0.5723672 0.02656655 0.05625026
## 0.5878010 0.02499278 0.05241269
## 0.5912440 0.02472564 0.05325086
## 0.5059979 0.02431065 0.05129652
## 0.5312242 0.02560560 0.05338802
## 0.5557316 0.02649882 0.05721655
## 0.5438182 0.02877293 0.06077471
## 0.5558637 0.02568464 0.05398612
## 0.5550227 0.02543186 0.05520704
## 0.5606423 0.02714567 0.05887034
## 0.5666388 0.02716689 0.05676868
## 0.5630442 0.02595302 0.05452447
## 0.5658454 0.03034492 0.06401267
##
## Accuracy was used to select the optimal model using the largest value.
## The final values used for the model were n.trees = 50, interaction.depth
## = 3, shrinkage = 0.3 and n.minobsinnode = 20.
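As an aside, caret's resamples() can compare the resampling distributions of several fitted models in one call; a sketch restricted to the models seeded with set.seed(1000), since identical fold indices are assumed.
resamps <- resamples(list(GLM = logisticReg,
BayesGLM = BayesianLogReg,
CART = rpartTune1,
RF = randomForestFit))
summary(resamps)
bwplot(resamps, metric = "Accuracy") ##side-by-side accuracy distributions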
##gbmFit fits best
Based on the accuracy results, the best modeling approach is gbm (tuning: n.trees = 50, interaction.depth = 3, shrinkage = 0.3 and n.minobsinnode = 20).
Using the training_na dataset and removing the Cabin variable, we refit the models and repeat the comparison.
5-fold cross-validation on accuracy, averaged over 5 repetitions, will again be the strategy for selecting the best possible model.
##Let's define a trainControl setting that will remain the same for all models applied hereafter
fitControl <- trainControl(## 5-fold CV
                           method = "repeatedcv",
                           number = 5,
                           #classProbs = TRUE,
                           ## repeated five times
                           repeats = 5)
Let's set the formula, including all remaining predictor variables.
##Set the formula
formula <- Survived~Pclass+Sex+Age+SibSp+Parch+Fare+Embarked
The criterion is a model's Accuracy = (TP+TN)/(TP+TN+FP+FN), which we opt to maximize. Under this criterion we shall compare a number of classifiers using the excellent “caret” package.
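As a quick illustration of this criterion, accuracy falls straight out of a confusion matrix; a minimal sketch with hypothetical predicted/actual vectors (toy data, not the Titanic sets):
#Toy example: accuracy from a confusion matrix
actual    <- factor(c(0, 1, 1, 0, 1, 0), levels = c(0, 1))
predicted <- factor(c(0, 1, 0, 0, 1, 1), levels = c(0, 1))
cm <- table(predicted, actual) #rows = predicted, columns = actual
sum(diag(cm)) / sum(cm)        #(TP+TN)/(TP+TN+FP+FN) = 4/6 here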
set.seed(1000)
logisticReg <- train(formula,
data = training_na,
method = "glm",
#metric = "ROC",
trControl = fitControl)
## Warning in predict.lm(object, newdata, se.fit, scale = 1, type =
## ifelse(type == : prediction from a rank-deficient fit may be misleading
## (this warning is emitted once per resample; 25 identical copies collapsed)
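The rank-deficiency warnings above do not invalidate the accuracy comparison, but they usually indicate a redundant column in the model matrix. A plausible culprit (an assumption, not verified here) is a factor level with no observations, such as the empty "" level that Embarked can carry; a cheap check on a copy of the data:
#Hedged check (assumption): an empty factor level such as "" in Embarked can
#make the model matrix rank-deficient; dropping unused levels is a cheap test
table(training_na$Embarked)             #is any level empty?
training_chk <- droplevels(training_na) #copy with unused factor levels dropped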
logisticReg
## Generalized Linear Model
##
## 676 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 540, 541, 541, 541, 541, 541, ...
## Resampling results
##
## Accuracy Kappa Accuracy SD Kappa SD
## 0.7887516 0.5613619 0.03060581 0.0632509
##
##
##Accuracy 0.7887516 (Stand.Dev 0.03060581)
set.seed(1000)
BayesianLogReg <- train(formula,
data = training_na,
method = "bayesglm",
trControl = fitControl)
BayesianLogReg
## Bayesian Generalized Linear Model
##
## 676 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 540, 541, 541, 541, 541, 541, ...
## Resampling results
##
## Accuracy Kappa Accuracy SD Kappa SD
## 0.7896339 0.5629991 0.03101783 0.06455036
##
##
##Accuracy 0.7896339 (Stand.Dev 0.03101783)
set.seed(1000)
##tuning for complexity parameter (cp)
rpartTune1 <- train(training_na[,c(2,3,4,5,6,7,9)], training_na$Survived,
method = "rpart",
tuneLength = 10,
trControl = fitControl)
plot(rpartTune1)
rpartTune1
## CART
##
## 676 samples
## 7 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 540, 541, 541, 541, 541, 541, ...
## Resampling results across tuning parameters:
##
## cp Accuracy Kappa Accuracy SD Kappa SD
## 0.00000000 0.7831285 0.5484498 0.03036959 0.06272175
## 0.04964539 0.7635967 0.5025345 0.02416022 0.05098645
## 0.09929078 0.7692133 0.5186312 0.02783123 0.05781905
## 0.14893617 0.7692133 0.5186312 0.02783123 0.05781905
## 0.19858156 0.7692133 0.5186312 0.02783123 0.05781905
## 0.24822695 0.7692133 0.5186312 0.02783123 0.05781905
## 0.29787234 0.7692133 0.5186312 0.02783123 0.05781905
## 0.34751773 0.7692133 0.5186312 0.02783123 0.05781905
## 0.39716312 0.7692133 0.5186312 0.02783123 0.05781905
## 0.44680851 0.6766599 0.2684261 0.08554935 0.24454961
##
## Accuracy was used to select the optimal model using the largest value.
## The final value used for the model was cp = 0.
##Accuracy 0.7831285 (Stand.Dev. 0.03036959)
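Since rattle and rpart.plot are already loaded, we can also visualise the selected tree. A minimal sketch, assuming (as caret documents) that the winning rpart object is stored in rpartTune1$finalModel:
#Plot the tree chosen by CV (cp = 0) with rattle's fancyRpartPlot
fancyRpartPlot(rpartTune1$finalModel)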
##tuning for maximum node depth (maxdepth)
rpartTune2 <- train(training_na[,c(2,3,4,5,6,7,9)], training_na$Survived,
method = "rpart2",
tuneLength = 10,
trControl = fitControl)
## note: only 7 possible values of the max tree depth from the initial fit.
## Truncating the grid to 7 .
plot(rpartTune2)
rpartTune2
## CART
##
## 676 samples
## 7 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 541, 540, 541, 541, 541, 540, ...
## Resampling results across tuning parameters:
##
## maxdepth Accuracy Kappa Accuracy SD Kappa SD
## 1 0.7692321 0.5183231 0.03458839 0.07405691
## 2 0.7733913 0.5182442 0.03141861 0.06554766
## 4 0.7825918 0.5401786 0.03562312 0.07510554
## 5 0.7831889 0.5416102 0.03793152 0.08119068
## 9 0.7873044 0.5509052 0.03261511 0.06941881
## 10 0.7873044 0.5509052 0.03261511 0.06941881
## 16 0.7873044 0.5509052 0.03261511 0.06941881
##
## Accuracy was used to select the optimal model using the largest value.
## The final value used for the model was maxdepth = 9.
##Accuracy 0.7873044 (Stand.Dev. 0.03261511)
set.seed(1000)
rfGrid = expand.grid(.mtry = c(1,2,3,4,5))
randomForestFit = train(x = training_na[,c(2,3,4,5,6,7,9)],
y = training_na$Survived,
method = "rf",
trControl = fitControl,
tuneGrid = rfGrid,
ntree=30)
plot(randomForestFit)
randomForestFit
## Random Forest
##
## 676 samples
## 7 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 540, 541, 541, 541, 541, 541, ...
## Resampling results across tuning parameters:
##
## mtry Accuracy Kappa Accuracy SD Kappa SD
## 1 0.7914380 0.5574521 0.03347622 0.06811380
## 2 0.8006145 0.5827785 0.03956907 0.08074349
## 3 0.8011962 0.5855667 0.03848834 0.07869026
## 4 0.7872856 0.5600704 0.02946737 0.06110196
## 5 0.7837233 0.5532518 0.03291143 0.06590668
##
## Accuracy was used to select the optimal model using the largest value.
## The final value used for the model was mtry = 3.
varImp(randomForestFit)
## rf variable importance
##
## Overall
## Sex 100.000
## Age 96.147
## Fare 93.730
## Pclass 27.275
## SibSp 8.639
## Parch 2.492
## Embarked 0.000
##Accuracy 0.8011962 (Stand.Dev. 0.03848834)
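With only ntree = 30, it is worth confirming that the forest's error has roughly stabilised. A quick sketch, assuming the underlying randomForest object is kept in randomForestFit$finalModel:
#OOB and per-class error versus number of trees; if the curves are still
#falling at 30 trees, a larger ntree may pay off
plot(randomForestFit$finalModel)
legend("topright", colnames(randomForestFit$finalModel$err.rate), col=1:3, lty=1:3)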
gbmGrid <- expand.grid(interaction.depth = c(1, 2, 3,4,5),
n.trees = (1:10)*5,
shrinkage = (1:3)*0.1,
n.minobsinnode = (1:3)*10)
gbmFit <- train(formula, data = training_na,
method = "gbm",
trControl = fitControl,
## This last option is actually one
## for gbm() that passes through
verbose = FALSE,
tuneGrid = gbmGrid)
gbmFit
## Stochastic Gradient Boosting
##
## 676 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 541, 541, 540, 540, 542, 540, ...
## Resampling results across tuning parameters:
##
## shrinkage interaction.depth n.minobsinnode n.trees Accuracy
## 0.1 1 10 5 0.7692200
## 0.1 1 10 10 0.7692200
## 0.1 1 10 15 0.7692200
## 0.1 1 10 20 0.7692200
## 0.1 1 10 25 0.7695163
## 0.1 1 10 30 0.7704052
## 0.1 1 10 35 0.7742549
## 0.1 1 10 40 0.7730763
## 0.1 1 10 45 0.7771961
## 0.1 1 10 50 0.7825339
## 0.1 1 20 5 0.7671459
## 0.1 1 20 10 0.7692200
## 0.1 1 20 15 0.7692200
## 0.1 1 20 20 0.7692200
## 0.1 1 20 25 0.7668562
## 0.1 1 20 30 0.7709956
## 0.1 1 20 35 0.7712897
## 0.1 1 20 40 0.7689325
## 0.1 1 20 45 0.7703964
## 0.1 1 20 50 0.7786775
## 0.1 1 30 5 0.7671459
## 0.1 1 30 10 0.7692200
## 0.1 1 30 15 0.7692200
## 0.1 1 30 20 0.7692200
## 0.1 1 30 25 0.7695207
## 0.1 1 30 30 0.7686318
## 0.1 1 30 35 0.7638932
## 0.1 1 30 40 0.7627080
## 0.1 1 30 45 0.7653616
## 0.1 1 30 50 0.7700827
## 0.1 2 10 5 0.7825295
## 0.1 2 10 10 0.7804619
## 0.1 2 10 15 0.7834294
## 0.1 2 10 20 0.7946757
## 0.1 2 10 25 0.7976300
## 0.1 2 10 30 0.7988194
## 0.1 2 10 35 0.7991202
## 0.1 2 10 40 0.8005864
## 0.1 2 10 45 0.7996996
## 0.1 2 10 50 0.8005907
## 0.1 2 20 5 0.7825295
## 0.1 2 20 10 0.7774924
## 0.1 2 20 15 0.7810458
## 0.1 2 20 20 0.7840088
## 0.1 2 20 25 0.7840022
## 0.1 2 20 30 0.7899195
## 0.1 2 20 35 0.7902224
## 0.1 2 20 40 0.7940655
## 0.1 2 20 45 0.7976234
## 0.1 2 20 50 0.7979153
## 0.1 2 30 5 0.7825295
## 0.1 2 30 10 0.7783725
## 0.1 2 30 15 0.7715707
## 0.1 2 30 20 0.7736645
## 0.1 2 30 25 0.7792722
## 0.1 2 30 30 0.7789956
## 0.1 2 30 35 0.7795838
## 0.1 2 30 40 0.7810588
## 0.1 2 30 45 0.7810566
## 0.1 2 30 50 0.7831285
## 0.1 3 10 5 0.7831199
## 0.1 3 10 10 0.7825185
## 0.1 3 10 15 0.7914010
## 0.1 3 10 20 0.8044230
## 0.1 3 10 25 0.8035559
## 0.1 3 10 30 0.8032486
## 0.1 3 10 35 0.8020743
## 0.1 3 10 40 0.8035428
## 0.1 3 10 45 0.8008849
## 0.1 3 10 50 0.8020810
## 0.1 3 20 5 0.7825295
## 0.1 3 20 10 0.7822419
## 0.1 3 20 15 0.7819413
## 0.1 3 20 20 0.7878607
## 0.1 3 20 25 0.7979393
## 0.1 3 20 30 0.8005974
## 0.1 3 20 35 0.8011944
## 0.1 3 20 40 0.8020920
## 0.1 3 20 45 0.8020833
## 0.1 3 20 50 0.8038589
## 0.1 3 30 5 0.7825295
## 0.1 3 30 10 0.7825295
## 0.1 3 30 15 0.7774990
## 0.1 3 30 20 0.7801504
## 0.1 3 30 25 0.7810546
## 0.1 3 30 30 0.7795731
## 0.1 3 30 35 0.7807539
## 0.1 3 30 40 0.7837169
## 0.1 3 30 45 0.7852071
## 0.1 3 30 50 0.7890372
## 0.1 4 10 5 0.7813486
## 0.1 4 10 10 0.7917169
## 0.1 4 10 15 0.7985209
## 0.1 4 10 20 0.8071422
## 0.1 4 10 25 0.8044734
## 0.1 4 10 30 0.8062316
## 0.1 4 10 35 0.8065345
## 0.1 4 10 40 0.8065212
## 0.1 4 10 45 0.8038655
## 0.1 4 10 50 0.8059221
## 0.1 4 20 5 0.7825295
## 0.1 4 20 10 0.7816428
## 0.1 4 20 15 0.7858085
## 0.1 4 20 20 0.7869740
## 0.1 4 20 25 0.7970285
## 0.1 4 20 30 0.8017868
## 0.1 4 20 35 0.8035449
## 0.1 4 20 40 0.8020744
## 0.1 4 20 45 0.8079918
## 0.1 4 20 50 0.8103557
## 0.1 4 30 5 0.7825295
## 0.1 4 30 10 0.7822332
## 0.1 4 30 15 0.7816514
## 0.1 4 30 20 0.7801612
## 0.1 4 30 25 0.7825273
## 0.1 4 30 30 0.7860895
## 0.1 4 30 35 0.7869653
## 0.1 4 30 40 0.7922943
## 0.1 4 30 45 0.7943553
## 0.1 4 30 50 0.7964272
## 0.1 5 10 5 0.7875577
## 0.1 5 10 10 0.7946581
## 0.1 5 10 15 0.8012008
## 0.1 5 10 20 0.8044515
## 0.1 5 10 25 0.8059307
## 0.1 5 10 30 0.8032662
## 0.1 5 10 35 0.8038786
## 0.1 5 10 40 0.8071248
## 0.1 5 10 45 0.8092033
## 0.1 5 10 50 0.8080049
## 0.1 5 20 5 0.7816406
## 0.1 5 20 10 0.7792768
## 0.1 5 20 15 0.7848955
## 0.1 5 20 20 0.7899151
## 0.1 5 20 25 0.7955340
## 0.1 5 20 30 0.7982204
## 0.1 5 20 35 0.7988174
## 0.1 5 20 40 0.8020636
## 0.1 5 20 45 0.8062206
## 0.1 5 20 50 0.8053207
## 0.1 5 30 5 0.7825295
## 0.1 5 30 10 0.7819369
## 0.1 5 30 15 0.7786644
## 0.1 5 30 20 0.7840219
## 0.1 5 30 25 0.7857954
## 0.1 5 30 30 0.7902137
## 0.1 5 30 35 0.7943508
## 0.1 5 30 40 0.7958390
## 0.1 5 30 45 0.7958390
## 0.1 5 30 50 0.7970308
## 0.2 1 10 5 0.7689259
## 0.2 1 10 10 0.7680370
## 0.2 1 10 15 0.7724707
## 0.2 1 10 20 0.7774990
## 0.2 1 10 25 0.7840023
## 0.2 1 10 30 0.7890284
## 0.2 1 10 35 0.7928715
## 0.2 1 10 40 0.7896188
## 0.2 1 10 45 0.7914054
## 0.2 1 10 50 0.7878476
## 0.2 1 20 5 0.7692200
## 0.2 1 20 10 0.7689237
## 0.2 1 20 15 0.7721917
## 0.2 1 20 20 0.7739607
## 0.2 1 20 25 0.7798760
## 0.2 1 20 30 0.7837147
## 0.2 1 20 35 0.7842984
## 0.2 1 20 40 0.7905186
## 0.2 1 20 45 0.7917082
## 0.2 1 20 50 0.7896364
## 0.2 1 30 5 0.7692200
## 0.2 1 30 10 0.7689259
## 0.2 1 30 15 0.7674225
## 0.2 1 30 20 0.7677363
## 0.2 1 30 25 0.7706753
## 0.2 1 30 30 0.7792703
## 0.2 1 30 35 0.7825252
## 0.2 1 30 40 0.7807560
## 0.2 1 30 45 0.7846037
## 0.2 1 30 50 0.7840132
## 0.2 2 10 5 0.7748410
## 0.2 2 10 10 0.7932160
## 0.2 2 10 15 0.8003097
## 0.2 2 10 20 0.8008892
## 0.2 2 10 25 0.7994076
## 0.2 2 10 30 0.7997258
## 0.2 2 10 35 0.8023905
## 0.2 2 10 40 0.8009047
## 0.2 2 10 45 0.7997281
## 0.2 2 10 50 0.7988458
## 0.2 2 20 5 0.7742417
## 0.2 2 20 10 0.7810325
## 0.2 2 20 15 0.7946603
## 0.2 2 20 20 0.7996799
## 0.2 2 20 25 0.8005688
## 0.2 2 20 30 0.7996844
## 0.2 2 20 35 0.7988020
## 0.2 2 20 40 0.8017803
## 0.2 2 20 45 0.8020701
## 0.2 2 20 50 0.8065299
## 0.2 2 30 5 0.7760043
## 0.2 2 30 10 0.7721699
## 0.2 2 30 15 0.7801503
## 0.2 2 30 20 0.7807429
## 0.2 2 30 25 0.7816318
## 0.2 2 30 30 0.7851919
## 0.2 2 30 35 0.7881506
## 0.2 2 30 40 0.7905165
## 0.2 2 30 45 0.7934730
## 0.2 2 30 50 0.7964360
## 0.2 3 10 5 0.7763180
## 0.2 3 10 10 0.7955580
## 0.2 3 10 15 0.8017782
## 0.2 3 10 20 0.8003165
## 0.2 3 10 25 0.8059462
## 0.2 3 10 30 0.8062512
## 0.2 3 10 35 0.8097696
## 0.2 3 10 40 0.8118678
## 0.2 3 10 45 0.8080246
## 0.2 3 10 50 0.8080113
## 0.2 3 20 5 0.7795665
## 0.2 3 20 10 0.7893313
## 0.2 3 20 15 0.7996866
## 0.2 3 20 20 0.7970154
## 0.2 3 20 25 0.7990808
## 0.2 3 20 30 0.8052988
## 0.2 3 20 35 0.8029306
## 0.2 3 20 40 0.8103381
## 0.2 3 20 45 0.8068087
## 0.2 3 20 50 0.8076801
## 0.2 3 30 5 0.7792658
## 0.2 3 30 10 0.7748060
## 0.2 3 30 15 0.7828214
## 0.2 3 30 20 0.7837169
## 0.2 3 30 25 0.7913944
## 0.2 3 30 30 0.7925995
## 0.2 3 30 35 0.7996933
## 0.2 3 30 40 0.7997019
## 0.2 3 30 45 0.8044494
## 0.2 3 30 50 0.8050310
## 0.2 4 10 5 0.7908192
## 0.2 4 10 10 0.8017716
## 0.2 4 10 15 0.8044579
## 0.2 4 10 20 0.8056322
## 0.2 4 10 25 0.8106780
## 0.2 4 10 30 0.8118502
## 0.2 4 10 35 0.8068175
## 0.2 4 10 40 0.8088872
## 0.2 4 10 45 0.8103644
## 0.2 4 10 50 0.8050376
## 0.2 4 20 5 0.7831286
## 0.2 4 20 10 0.7896561
## 0.2 4 20 15 0.7943838
## 0.2 4 20 20 0.8005908
## 0.2 4 20 25 0.8009111
## 0.2 4 20 30 0.8017782
## 0.2 4 20 35 0.8059352
## 0.2 4 20 40 0.8062075
## 0.2 4 20 45 0.8068001
## 0.2 4 20 50 0.8062097
## 0.2 4 30 5 0.7822332
## 0.2 4 30 10 0.7745448
## 0.2 4 30 15 0.7858064
## 0.2 4 30 20 0.7878696
## 0.2 4 30 25 0.7952508
## 0.2 4 30 30 0.8005646
## 0.2 4 30 35 0.7967280
## 0.2 4 30 40 0.7988043
## 0.2 4 30 45 0.8044122
## 0.2 4 30 50 0.7996844
## 0.2 5 10 5 0.7952419
## 0.2 5 10 10 0.7964493
## 0.2 5 10 15 0.7976344
## 0.2 5 10 20 0.7985101
## 0.2 5 10 25 0.8038567
## 0.2 5 10 30 0.8065278
## 0.2 5 10 35 0.8071096
## 0.2 5 10 40 0.8062161
## 0.2 5 10 45 0.8026781
## 0.2 5 10 50 0.7988219
## 0.2 5 20 5 0.7789585
## 0.2 5 20 10 0.7878563
## 0.2 5 20 15 0.7955602
## 0.2 5 20 20 0.8026452
## 0.2 5 20 25 0.8091770
## 0.2 5 20 30 0.8068021
## 0.2 5 20 35 0.8038479
## 0.2 5 20 40 0.8047390
## 0.2 5 20 45 0.7997238
## 0.2 5 20 50 0.8012032
## 0.2 5 30 5 0.7775077
## 0.2 5 30 10 0.7760349
## 0.2 5 30 15 0.7863968
## 0.2 5 30 20 0.7908106
## 0.2 5 30 25 0.7967478
## 0.2 5 30 30 0.8032314
## 0.2 5 30 35 0.8017629
## 0.2 5 30 40 0.8035385
## 0.2 5 30 45 0.8044319
## 0.2 5 30 50 0.8079656
## 0.3 1 10 5 0.7695163
## 0.3 1 10 10 0.7736666
## 0.3 1 10 15 0.7751416
## 0.3 1 10 20 0.7854707
## 0.3 1 10 25 0.7908084
## 0.3 1 10 30 0.7914011
## 0.3 1 10 35 0.7890328
## 0.3 1 10 40 0.7902202
## 0.3 1 10 45 0.7896167
## 0.3 1 10 50 0.7878432
## 0.3 1 20 5 0.7692090
## 0.3 1 20 10 0.7650652
## 0.3 1 20 15 0.7727472
## 0.3 1 20 20 0.7795644
## 0.3 1 20 25 0.7878521
## 0.3 1 20 30 0.7884425
## 0.3 1 20 35 0.7869675
## 0.3 1 20 40 0.7851831
## 0.3 1 20 45 0.7857846
## 0.3 1 20 50 0.7881680
## 0.3 1 30 5 0.7692200
## 0.3 1 30 10 0.7656535
## 0.3 1 30 15 0.7742330
## 0.3 1 30 20 0.7780894
## 0.3 1 30 25 0.7795577
## 0.3 1 30 30 0.7769018
## 0.3 1 30 35 0.7828148
## 0.3 1 30 40 0.7825032
## 0.3 1 30 45 0.7845991
## 0.3 1 30 50 0.7834074
## 0.3 2 10 5 0.7816580
## 0.3 2 10 10 0.7991005
## 0.3 2 10 15 0.8044229
## 0.3 2 10 20 0.8005843
## 0.3 2 10 25 0.7988195
## 0.3 2 10 30 0.7952791
## 0.3 2 10 35 0.7973424
## 0.3 2 10 40 0.7999914
## 0.3 2 10 45 0.8026911
## 0.3 2 10 50 0.8023731
## 0.3 2 20 5 0.7780937
## 0.3 2 20 10 0.7905384
## 0.3 2 20 15 0.7952530
## 0.3 2 20 20 0.7973336
## 0.3 2 20 25 0.7976342
## 0.3 2 20 30 0.7979306
## 0.3 2 20 35 0.7982467
## 0.3 2 20 40 0.8012031
## 0.3 2 20 45 0.8050594
## 0.3 2 20 50 0.8038654
## 0.3 2 30 5 0.7748541
## 0.3 2 30 10 0.7795599
## 0.3 2 30 15 0.7828257
## 0.3 2 30 20 0.7831309
## 0.3 2 30 25 0.7825208
## 0.3 2 30 30 0.7801746
## 0.3 2 30 35 0.7884556
## 0.3 2 30 40 0.7937956
## 0.3 2 30 45 0.7961464
## 0.3 2 30 50 0.7928826
## 0.3 3 10 5 0.7849019
## 0.3 3 10 10 0.7958740
## 0.3 3 10 15 0.8000047
## 0.3 3 10 20 0.7985342
## 0.3 3 10 25 0.8020897
## 0.3 3 10 30 0.8014949
## 0.3 3 10 35 0.7964666
## 0.3 3 10 40 0.7976585
## 0.3 3 10 45 0.7967499
## 0.3 3 10 50 0.8009069
## 0.3 3 20 5 0.7763357
## 0.3 3 20 10 0.7929307
## 0.3 3 20 15 0.7967650
## 0.3 3 20 20 0.7988327
## 0.3 3 20 25 0.8044471
## 0.3 3 20 30 0.8047653
## 0.3 3 20 35 0.8053470
## 0.3 3 20 40 0.8032905
## 0.3 3 20 45 0.8050551
## 0.3 3 20 50 0.8106761
## 0.3 3 30 5 0.7807297
## 0.3 3 30 10 0.7822068
## 0.3 3 30 15 0.7925644
## 0.3 3 30 20 0.7916886
## 0.3 3 30 25 0.7991071
## 0.3 3 30 30 0.7943685
## 0.3 3 30 35 0.7979327
## 0.3 3 30 40 0.8023707
## 0.3 3 30 45 0.8014929
## 0.3 3 30 50 0.8097673
## 0.3 4 10 5 0.7893665
## 0.3 4 10 10 0.7976364
## 0.3 4 10 15 0.8014905
## 0.3 4 10 20 0.8000308
## 0.3 4 10 25 0.8020721
## 0.3 4 10 30 0.8082728
## 0.3 4 10 35 0.8017737
## 0.3 4 10 40 0.8011746
## 0.3 4 10 45 0.8008828
## 0.3 4 10 50 0.7952618
## 0.3 4 20 5 0.7766188
## 0.3 4 20 10 0.7988065
## 0.3 4 20 15 0.8020615
## 0.3 4 20 20 0.8032511
## 0.3 4 20 25 0.8059155
## 0.3 4 20 30 0.8020899
## 0.3 4 20 35 0.8044559
## 0.3 4 20 40 0.8050464
## 0.3 4 20 45 0.8023709
## 0.3 4 20 50 0.8047238
## 0.3 4 30 5 0.7807692
## 0.3 4 30 10 0.7807429
## 0.3 4 30 15 0.7908083
## 0.3 4 30 20 0.8002791
## 0.3 4 30 25 0.8023686
## 0.3 4 30 30 0.7958411
## 0.3 4 30 35 0.8058979
## 0.3 4 30 40 0.8064971
## 0.3 4 30 45 0.8076889
## 0.3 4 30 50 0.8064886
## 0.3 5 10 5 0.7949631
## 0.3 5 10 10 0.7976430
## 0.3 5 10 15 0.8032662
## 0.3 5 10 20 0.8029721
## 0.3 5 10 25 0.8029807
## 0.3 5 10 30 0.8009067
## 0.3 5 10 35 0.8002967
## 0.3 5 10 40 0.8014687
## 0.3 5 10 45 0.8047368
## 0.3 5 10 50 0.7979131
## 0.3 5 20 5 0.7858084
## 0.3 5 20 10 0.8000134
## 0.3 5 20 15 0.7987889
## 0.3 5 20 20 0.8032772
## 0.3 5 20 25 0.8041531
## 0.3 5 20 30 0.8047390
## 0.3 5 20 35 0.8011878
## 0.3 5 20 40 0.8044580
## 0.3 5 20 45 0.8068241
## 0.3 5 20 50 0.8029437
## 0.3 5 30 5 0.7813617
## 0.3 5 30 10 0.7795929
## 0.3 5 30 15 0.7929002
## 0.3 5 30 20 0.7955537
## 0.3 5 30 25 0.7976235
## 0.3 5 30 30 0.8032576
## 0.3 5 30 35 0.8062118
## 0.3 5 30 40 0.8065016
## 0.3 5 30 45 0.8005908
## 0.3 5 30 50 0.7996977
## Kappa Accuracy SD Kappa SD
## 0.5183773 0.03393878 0.07226845
## 0.5183773 0.03393878 0.07226845
## 0.5183773 0.03393878 0.07226845
## 0.5183773 0.03393878 0.07226845
## 0.5191203 0.03234106 0.06854046
## 0.5197694 0.03238810 0.06812746
## 0.5292037 0.03001320 0.06363436
## 0.5266549 0.03229885 0.06775505
## 0.5357588 0.03152644 0.06658660
## 0.5479427 0.03267741 0.06863636
## 0.5129120 0.03308138 0.07087347
## 0.5183773 0.03393878 0.07226845
## 0.5183773 0.03393878 0.07226845
## 0.5183773 0.03393878 0.07226845
## 0.5125371 0.03207206 0.06827715
## 0.5217805 0.03423990 0.07265939
## 0.5227742 0.03461667 0.07339528
## 0.5187636 0.03172253 0.06657126
## 0.5219998 0.03306395 0.06978850
## 0.5388186 0.03601169 0.07497780
## 0.5129120 0.03308138 0.07087347
## 0.5183773 0.03393878 0.07226845
## 0.5183773 0.03393878 0.07226845
## 0.5183773 0.03393878 0.07226845
## 0.5187623 0.03328308 0.07037158
## 0.5167253 0.03265732 0.06875086
## 0.5073610 0.03348434 0.07141457
## 0.5043920 0.03297063 0.07052438
## 0.5098637 0.03893390 0.08331708
## 0.5193747 0.03390441 0.07247806
## 0.5210296 0.03281182 0.07589674
## 0.5196404 0.03272431 0.07331539
## 0.5295893 0.03507608 0.07675815
## 0.5550468 0.03064376 0.06981859
## 0.5635714 0.03321750 0.07440048
## 0.5688584 0.03679681 0.08065808
## 0.5697985 0.03534985 0.07761932
## 0.5737398 0.03632389 0.07937564
## 0.5730305 0.03613155 0.07896953
## 0.5764080 0.04051097 0.08724491
## 0.5210296 0.03281182 0.07589674
## 0.5121614 0.03047265 0.07121906
## 0.5212090 0.03068615 0.07152340
## 0.5319495 0.03068007 0.06994657
## 0.5346366 0.03246633 0.07218970
## 0.5489193 0.03396812 0.07489160
## 0.5511353 0.03399065 0.07388498
## 0.5594119 0.03750440 0.08189125
## 0.5680006 0.03801944 0.08377953
## 0.5694889 0.03890836 0.08560819
## 0.5210296 0.03281182 0.07589674
## 0.5143389 0.03198798 0.07338106
## 0.5044280 0.03512856 0.07966227
## 0.5105136 0.03012137 0.06749083
## 0.5232800 0.03980970 0.08676608
## 0.5238435 0.03922048 0.08605167
## 0.5272517 0.03680009 0.08102951
## 0.5307358 0.03935275 0.08728833
## 0.5313922 0.04128408 0.09117526
## 0.5368732 0.04133045 0.09132017
## 0.5236322 0.03238332 0.07452237
## 0.5276644 0.03490517 0.07812823
## 0.5509909 0.03218069 0.07247623
## 0.5795665 0.03329984 0.07493349
## 0.5795425 0.03319484 0.07378773
## 0.5797838 0.03769627 0.08314820
## 0.5788753 0.03905971 0.08568866
## 0.5825189 0.03668385 0.08062980
## 0.5778401 0.04044120 0.08778295
## 0.5799190 0.04021630 0.08779683
## 0.5210296 0.03281182 0.07589674
## 0.5217859 0.03379657 0.07864503
## 0.5264079 0.03404875 0.07724029
## 0.5446845 0.03824793 0.08301641
## 0.5680657 0.03954969 0.08666314
## 0.5746614 0.03586512 0.07915909
## 0.5754097 0.03432135 0.07664862
## 0.5791768 0.03658954 0.08028893
## 0.5802257 0.03853887 0.08369594
## 0.5842352 0.03969796 0.08619462
## 0.5210296 0.03281182 0.07589674
## 0.5211282 0.03281182 0.07590869
## 0.5143273 0.03586893 0.08118366
## 0.5237615 0.03355769 0.07519913
## 0.5276281 0.03688102 0.08153954
## 0.5263837 0.03831999 0.08439204
## 0.5317599 0.03688078 0.08101987
## 0.5389632 0.03608005 0.07966941
## 0.5425207 0.03653517 0.08058609
## 0.5514920 0.03306540 0.07272810
## 0.5204990 0.03442250 0.07799758
## 0.5482350 0.03152948 0.07239077
## 0.5661017 0.03355598 0.07590924
## 0.5871777 0.03965501 0.08756962
## 0.5829752 0.04061988 0.08861809
## 0.5874944 0.03543665 0.07770856
## 0.5899804 0.04210912 0.09211458
## 0.5903280 0.04254705 0.09274102
## 0.5855795 0.04364052 0.09388850
## 0.5896841 0.04181467 0.09022895
## 0.5210296 0.03281182 0.07589674
## 0.5228649 0.03344102 0.07686302
## 0.5356187 0.03448879 0.07813136
## 0.5423391 0.04277478 0.09388909
## 0.5654346 0.04031012 0.08767534
## 0.5771612 0.04193320 0.09217213
## 0.5822795 0.04398321 0.09571264
## 0.5804765 0.04081500 0.08854718
## 0.5937126 0.04037305 0.08676155
## 0.5999357 0.04228673 0.09031525
## 0.5210296 0.03281182 0.07589674
## 0.5205376 0.03190186 0.07440786
## 0.5228057 0.03284034 0.07551231
## 0.5242798 0.03508364 0.07893724
## 0.5332382 0.03613066 0.08126921
## 0.5424308 0.03676733 0.08159230
## 0.5454652 0.03534139 0.07856561
## 0.5573948 0.03927584 0.08663164
## 0.5633977 0.03932614 0.08591894
## 0.5687352 0.04151075 0.09032322
## 0.5366426 0.03717174 0.08339964
## 0.5591298 0.03359876 0.07410411
## 0.5750715 0.03517323 0.07747841
## 0.5837964 0.03636965 0.07896338
## 0.5880846 0.03627035 0.07910978
## 0.5827740 0.03840365 0.08362873
## 0.5852619 0.03807153 0.08344146
## 0.5931229 0.03613146 0.07719163
## 0.5983229 0.03586319 0.07633857
## 0.5960238 0.03814359 0.08171305
## 0.5195215 0.03365416 0.07764080
## 0.5177603 0.03033465 0.07016951
## 0.5349548 0.03418802 0.07697207
## 0.5495220 0.03651616 0.08086185
## 0.5639333 0.04100075 0.09030176
## 0.5710154 0.03734297 0.08234066
## 0.5737560 0.03723285 0.08191723
## 0.5814023 0.04163711 0.09149771
## 0.5914669 0.03826183 0.08343730
## 0.5896858 0.03915350 0.08469542
## 0.5210296 0.03281182 0.07589674
## 0.5201440 0.03261634 0.07587571
## 0.5166205 0.03404089 0.07970591
## 0.5329872 0.03647345 0.08310136
## 0.5389088 0.03407723 0.07744444
## 0.5515756 0.03661211 0.08031136
## 0.5621379 0.04050150 0.08820487
## 0.5662897 0.03834789 0.08338702
## 0.5675135 0.04088093 0.08815303
## 0.5708063 0.04090293 0.08772237
## 0.5169710 0.03374533 0.07158638
## 0.5162331 0.03269375 0.06943390
## 0.5245889 0.03070576 0.06715717
## 0.5355876 0.03486774 0.07256399
## 0.5492955 0.03216373 0.06772441
## 0.5608190 0.03440854 0.07130463
## 0.5691649 0.03315484 0.06908724
## 0.5625317 0.03483139 0.07193866
## 0.5660156 0.03527126 0.07450274
## 0.5586791 0.03683458 0.07672224
## 0.5172302 0.03393878 0.07222091
## 0.5169430 0.03409502 0.07211000
## 0.5240244 0.03557313 0.07488809
## 0.5280154 0.03471953 0.07364855
## 0.5404931 0.03558414 0.07557249
## 0.5481914 0.03740993 0.08046742
## 0.5509045 0.03610213 0.07657604
## 0.5635013 0.03199403 0.06629718
## 0.5661979 0.03208350 0.06716770
## 0.5619031 0.02674801 0.05608425
## 0.5183773 0.03393878 0.07226845
## 0.5169710 0.03374533 0.07158638
## 0.5125321 0.03600090 0.07978609
## 0.5147364 0.03493376 0.07436015
## 0.5207422 0.03795410 0.07978023
## 0.5389725 0.03502212 0.07350799
## 0.5466634 0.03728317 0.07734130
## 0.5424862 0.03634710 0.07594400
## 0.5499888 0.03600662 0.07591392
## 0.5487772 0.03527150 0.07420066
## 0.5151364 0.03460563 0.07336020
## 0.5538143 0.03712949 0.08119513
## 0.5724385 0.03689205 0.08133921
## 0.5773178 0.03612364 0.07962547
## 0.5740210 0.03918786 0.08629756
## 0.5759474 0.03987098 0.08617559
## 0.5822817 0.03960086 0.08590261
## 0.5804765 0.03985016 0.08644292
## 0.5782216 0.04412113 0.09537168
## 0.5765517 0.04134973 0.08918746
## 0.5098323 0.03443781 0.07665337
## 0.5288048 0.03996181 0.08776722
## 0.5594083 0.03570276 0.07947883
## 0.5724996 0.03780955 0.08337814
## 0.5752432 0.03558608 0.07731139
## 0.5751618 0.03482490 0.07598149
## 0.5731883 0.03248281 0.07174552
## 0.5807215 0.03461498 0.07485997
## 0.5818974 0.03608299 0.07755620
## 0.5916328 0.03971055 0.08536831
## 0.5120512 0.03311360 0.07302380
## 0.5114586 0.03571320 0.07688703
## 0.5284180 0.03577555 0.07747355
## 0.5319119 0.03714831 0.08035902
## 0.5360551 0.03769851 0.08012579
## 0.5449478 0.03594037 0.07748335
## 0.5513411 0.03999050 0.08638223
## 0.5563151 0.03910416 0.08460597
## 0.5624419 0.04117473 0.08954341
## 0.5694958 0.03881296 0.08388195
## 0.5177990 0.03421471 0.07375515
## 0.5640656 0.03798410 0.08092939
## 0.5798566 0.03466880 0.07506243
## 0.5782000 0.04111478 0.08872517
## 0.5901665 0.04483206 0.09609667
## 0.5916416 0.03778544 0.08048460
## 0.5993856 0.03511544 0.07528379
## 0.6043174 0.03860621 0.08277416
## 0.5961063 0.04054591 0.08697952
## 0.5963522 0.03387993 0.07235589
## 0.5176038 0.03325828 0.07688195
## 0.5465009 0.03936737 0.08729711
## 0.5715992 0.04143881 0.09038113
## 0.5687730 0.03515260 0.07676421
## 0.5754229 0.03956888 0.08532286
## 0.5890040 0.03771307 0.08116247
## 0.5846145 0.03944418 0.08384134
## 0.6002162 0.04087528 0.08719130
## 0.5934902 0.04227046 0.09028416
## 0.5948964 0.04315231 0.09226684
## 0.5152369 0.03341526 0.07674052
## 0.5134326 0.03320953 0.07510068
## 0.5335568 0.02926585 0.06764545
## 0.5382668 0.03257208 0.07217027
## 0.5566589 0.03333004 0.07278096
## 0.5602561 0.03725714 0.08064917
## 0.5761592 0.03810637 0.08299231
## 0.5772956 0.04030138 0.08668568
## 0.5876205 0.03888818 0.08439298
## 0.5890094 0.03961574 0.08577761
## 0.5509863 0.03992643 0.08878486
## 0.5780249 0.03826006 0.08197373
## 0.5858111 0.03967289 0.08394771
## 0.5901439 0.03660329 0.07725096
## 0.6021863 0.03712531 0.07782299
## 0.6036144 0.03781755 0.08098577
## 0.5937869 0.03519088 0.07487157
## 0.5982164 0.04154017 0.08860018
## 0.6019594 0.04097429 0.08774003
## 0.5911334 0.03944279 0.08385222
## 0.5254849 0.03347740 0.07709293
## 0.5468191 0.03478796 0.07894762
## 0.5635063 0.03358091 0.07253656
## 0.5781834 0.03480269 0.07536725
## 0.5802188 0.03902883 0.08371333
## 0.5824465 0.03997173 0.08605627
## 0.5911436 0.03767634 0.08094850
## 0.5918902 0.03724839 0.07964735
## 0.5937591 0.03639199 0.07726405
## 0.5931066 0.03680520 0.07810488
## 0.5220965 0.03391652 0.07865779
## 0.5144430 0.03136557 0.07135533
## 0.5430473 0.03650011 0.08003451
## 0.5497814 0.03965942 0.08609346
## 0.5663920 0.04107202 0.08783240
## 0.5794407 0.03665504 0.07848265
## 0.5716394 0.03972462 0.08530502
## 0.5758561 0.03762483 0.08120198
## 0.5883612 0.03199938 0.06844049
## 0.5784763 0.03602444 0.07732959
## 0.5576538 0.03473094 0.08018002
## 0.5679164 0.03516879 0.07750935
## 0.5721984 0.04283439 0.09362600
## 0.5764187 0.04339775 0.09237538
## 0.5866252 0.04216237 0.09185605
## 0.5931450 0.04302776 0.09235933
## 0.5954580 0.03768359 0.08123017
## 0.5939111 0.03249291 0.07100813
## 0.5866180 0.03705052 0.07874142
## 0.5792904 0.03433515 0.07412373
## 0.5193730 0.03584080 0.08152691
## 0.5450906 0.04064278 0.08958551
## 0.5672573 0.03953345 0.08561184
## 0.5823305 0.03916616 0.08467624
## 0.5976332 0.03794928 0.08048823
## 0.5934562 0.03748358 0.07956714
## 0.5879086 0.03700696 0.07830587
## 0.5895409 0.03621346 0.07695299
## 0.5791640 0.03237916 0.06950094
## 0.5826546 0.03544921 0.07528752
## 0.5126078 0.03327680 0.07654340
## 0.5188632 0.03681436 0.08227781
## 0.5447337 0.04201053 0.09206103
## 0.5553937 0.03926278 0.08654355
## 0.5701376 0.04494570 0.09631607
## 0.5844844 0.04114056 0.08873242
## 0.5819877 0.04512505 0.09740622
## 0.5857281 0.04002385 0.08673185
## 0.5879042 0.04219032 0.09091708
## 0.5955157 0.03835778 0.08319063
## 0.5173230 0.03510666 0.07406089
## 0.5263093 0.03661521 0.07845586
## 0.5302914 0.03918775 0.08245652
## 0.5533769 0.04193622 0.08725709
## 0.5637770 0.04206413 0.08659977
## 0.5652285 0.04068528 0.08564903
## 0.5611792 0.04267703 0.08903293
## 0.5624020 0.04427843 0.09367776
## 0.5616826 0.04202142 0.09026935
## 0.5578672 0.03872668 0.08099412
## 0.5163474 0.03874189 0.08142296
## 0.5083774 0.03806797 0.07969086
## 0.5250499 0.03440642 0.07345228
## 0.5411946 0.03322540 0.06947629
## 0.5582229 0.03128788 0.06635300
## 0.5590008 0.03705383 0.07747178
## 0.5561777 0.03354009 0.06951671
## 0.5528666 0.03321974 0.06868763
## 0.5527689 0.03429193 0.07161825
## 0.5588035 0.03654684 0.07654890
## 0.5183773 0.03393878 0.07226845
## 0.5066314 0.03303584 0.07170006
## 0.5280664 0.04124586 0.08485467
## 0.5362309 0.03694530 0.07928800
## 0.5393689 0.03855971 0.08176881
## 0.5327965 0.03929538 0.08327213
## 0.5458970 0.03722415 0.07779203
## 0.5461336 0.03982100 0.08489335
## 0.5497313 0.03813788 0.08033136
## 0.5468864 0.03766296 0.07937718
## 0.5325504 0.04017198 0.08873467
## 0.5698391 0.03441056 0.07765143
## 0.5841508 0.03645767 0.07969209
## 0.5783758 0.03929615 0.08568416
## 0.5753212 0.03704756 0.08080705
## 0.5687679 0.04008192 0.08537032
## 0.5733113 0.04185164 0.09130995
## 0.5791768 0.03558646 0.07655883
## 0.5857847 0.03439634 0.07326127
## 0.5854023 0.03541230 0.07490209
## 0.5197226 0.03122924 0.07071840
## 0.5506291 0.03480891 0.07801667
## 0.5654541 0.03719945 0.08181575
## 0.5706060 0.03744835 0.08234192
## 0.5714652 0.04449873 0.09559615
## 0.5728319 0.04073704 0.08825683
## 0.5740299 0.04263504 0.09236888
## 0.5810442 0.03913269 0.08428265
## 0.5896073 0.03992316 0.08592560
## 0.5873714 0.04108527 0.08779392
## 0.5137135 0.03146380 0.07129480
## 0.5277143 0.03833634 0.08308231
## 0.5365923 0.03917834 0.08486242
## 0.5384600 0.04131852 0.08942134
## 0.5384556 0.03503511 0.07625280
## 0.5347983 0.04224760 0.09165276
## 0.5520973 0.04081726 0.08841518
## 0.5643723 0.03757131 0.08082400
## 0.5697999 0.04217028 0.09021698
## 0.5637296 0.04227767 0.09004172
## 0.5375323 0.03410659 0.07570885
## 0.5667587 0.04158580 0.08918644
## 0.5782773 0.03389681 0.07285345
## 0.5757450 0.03953227 0.08546941
## 0.5841031 0.03632392 0.07881900
## 0.5831806 0.03550018 0.07566367
## 0.5731353 0.03546272 0.07551706
## 0.5765515 0.03365441 0.07061175
## 0.5748990 0.03368857 0.07063089
## 0.5836300 0.03780360 0.07907016
## 0.5176877 0.03303860 0.07474052
## 0.5594043 0.03374972 0.07443424
## 0.5700692 0.04089944 0.08831211
## 0.5751357 0.03777140 0.08125848
## 0.5872933 0.03807141 0.08222702
## 0.5889305 0.04001726 0.08603474
## 0.5903879 0.03964014 0.08422658
## 0.5868624 0.04026519 0.08596284
## 0.5900932 0.04228541 0.09058700
## 0.6014563 0.04352503 0.09410263
## 0.5238863 0.03568051 0.08196458
## 0.5352401 0.03989012 0.08855209
## 0.5584502 0.04350113 0.09614454
## 0.5597404 0.04374462 0.09481454
## 0.5756727 0.03775187 0.08119077
## 0.5666469 0.04125414 0.08845772
## 0.5743438 0.04110343 0.08856086
## 0.5847041 0.03663583 0.07786538
## 0.5827792 0.03563940 0.07715386
## 0.6003405 0.03701778 0.07932249
## 0.5504019 0.03053928 0.06766362
## 0.5718700 0.04151282 0.08999472
## 0.5822923 0.04045436 0.08661237
## 0.5800868 0.04193850 0.08931330
## 0.5850621 0.03625584 0.07751109
## 0.5980514 0.03152014 0.06747862
## 0.5844063 0.03626072 0.07741476
## 0.5843031 0.03365481 0.07117046
## 0.5841121 0.03763028 0.08003716
## 0.5721526 0.03905111 0.08237885
## 0.5200005 0.03691389 0.08206691
## 0.5741727 0.03541807 0.07702171
## 0.5832893 0.03726587 0.08043449
## 0.5864972 0.03627496 0.07876949
## 0.5915313 0.03397767 0.07338427
## 0.5853462 0.03308261 0.07077401
## 0.5905677 0.03408001 0.07216388
## 0.5918596 0.03413789 0.07254356
## 0.5860294 0.03682606 0.07881973
## 0.5910720 0.03555247 0.07517700
## 0.5269860 0.03579981 0.07883735
## 0.5330094 0.03061641 0.06687986
## 0.5569869 0.03024174 0.06575329
## 0.5787363 0.03433392 0.07473972
## 0.5845707 0.03280745 0.07011649
## 0.5711026 0.02985536 0.06347885
## 0.5926910 0.03196354 0.06819938
## 0.5946017 0.03117025 0.06646160
## 0.5969273 0.03327437 0.07167926
## 0.5944426 0.03764294 0.08125867
## 0.5642591 0.03509784 0.07621106
## 0.5726951 0.03874634 0.08313882
## 0.5867072 0.04308909 0.08995230
## 0.5873029 0.04068917 0.08586542
## 0.5867448 0.04116462 0.08711239
## 0.5834792 0.03637273 0.07761817
## 0.5817994 0.03480958 0.07324626
## 0.5850144 0.03450280 0.07208284
## 0.5926059 0.03896494 0.08162206
## 0.5786861 0.03940778 0.08163989
## 0.5385558 0.03535088 0.08200617
## 0.5769759 0.03544124 0.07719127
## 0.5771997 0.03541039 0.07539439
## 0.5869317 0.04212114 0.09025827
## 0.5905111 0.03548789 0.07552194
## 0.5917737 0.03609223 0.07706930
## 0.5841522 0.04008411 0.08544700
## 0.5910386 0.03791604 0.08045472
## 0.5962463 0.03756961 0.08026425
## 0.5883126 0.03541171 0.07591678
## 0.5272089 0.03199805 0.07369057
## 0.5314608 0.03926174 0.08501290
## 0.5629267 0.03993214 0.08546334
## 0.5695637 0.03926128 0.08264877
## 0.5733012 0.03699451 0.07883112
## 0.5853242 0.03761768 0.08007517
## 0.5917886 0.03665754 0.07782738
## 0.5937817 0.03653243 0.07764409
## 0.5815142 0.04189606 0.08997057
## 0.5805268 0.03831341 0.08099892
##
## Accuracy was used to select the optimal model using the largest value.
## The final values used for the model were n.trees = 40, interaction.depth
## = 3, shrinkage = 0.2 and n.minobsinnode = 10.
trellis.par.set(caretTheme())
plot(gbmFit)
ggplot(gbmFit)
##Accuracy 0.8118678 (Stand.Dev. 0.03860621)
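Rather than eyeballing each printout below, caret's resamples() can pool the cross-validation results of all six fits for a side-by-side comparison. A sketch (note that the fits made without re-seeding used slightly different folds, so the comparison is approximate):
#Collect the 25 resampled Accuracy/Kappa values of every model
results <- resamples(list(glm=logisticReg, bayesglm=BayesianLogReg,
                          cart.cp=rpartTune1, cart.depth=rpartTune2,
                          rf=randomForestFit, gbm=gbmFit))
summary(results) #mean and quantiles of Accuracy and Kappa per model
bwplot(results)  #box-and-whisker plot of the resampling distributions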
logisticReg #Accuracy 0.7887516 (Stand.Dev. 0.03060581)
BayesianLogReg #Accuracy 0.7896339 (Stand.Dev. 0.03101783)
rpartTune1 #Accuracy 0.7831285 (Stand.Dev. 0.03036959)
rpartTune2 #Accuracy 0.7873044 (Stand.Dev. 0.03261511)
randomForestFit #Accuracy 0.8011962 (Stand.Dev. 0.03848834)
gbmFit #Accuracy 0.8118678 (Stand.Dev. 0.03860621)
## Stochastic Gradient Boosting
##
## 676 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 541, 541, 540, 540, 542, 540, ...
## Resampling results across tuning parameters:
##
## shrinkage interaction.depth n.minobsinnode n.trees Accuracy
## 0.1 1 10 5 0.7692200
## 0.1 1 10 10 0.7692200
## 0.1 1 10 15 0.7692200
## 0.1 1 10 20 0.7692200
## 0.1 1 10 25 0.7695163
## 0.1 1 10 30 0.7704052
## 0.1 1 10 35 0.7742549
## 0.1 1 10 40 0.7730763
## 0.1 1 10 45 0.7771961
## 0.1 1 10 50 0.7825339
## 0.1 1 20 5 0.7671459
## 0.1 1 20 10 0.7692200
## 0.1 1 20 15 0.7692200
## 0.1 1 20 20 0.7692200
## 0.1 1 20 25 0.7668562
## 0.1 1 20 30 0.7709956
## 0.1 1 20 35 0.7712897
## 0.1 1 20 40 0.7689325
## 0.1 1 20 45 0.7703964
## 0.1 1 20 50 0.7786775
## 0.1 1 30 5 0.7671459
## 0.1 1 30 10 0.7692200
## 0.1 1 30 15 0.7692200
## 0.1 1 30 20 0.7692200
## 0.1 1 30 25 0.7695207
## 0.1 1 30 30 0.7686318
## 0.1 1 30 35 0.7638932
## 0.1 1 30 40 0.7627080
## 0.1 1 30 45 0.7653616
## 0.1 1 30 50 0.7700827
## 0.1 2 10 5 0.7825295
## 0.1 2 10 10 0.7804619
## 0.1 2 10 15 0.7834294
## 0.1 2 10 20 0.7946757
## 0.1 2 10 25 0.7976300
## 0.1 2 10 30 0.7988194
## 0.1 2 10 35 0.7991202
## 0.1 2 10 40 0.8005864
## 0.1 2 10 45 0.7996996
## 0.1 2 10 50 0.8005907
## 0.1 2 20 5 0.7825295
## 0.1 2 20 10 0.7774924
## 0.1 2 20 15 0.7810458
## 0.1 2 20 20 0.7840088
## 0.1 2 20 25 0.7840022
## 0.1 2 20 30 0.7899195
## 0.1 2 20 35 0.7902224
## 0.1 2 20 40 0.7940655
## 0.1 2 20 45 0.7976234
## 0.1 2 20 50 0.7979153
## 0.1 2 30 5 0.7825295
## 0.1 2 30 10 0.7783725
## 0.1 2 30 15 0.7715707
## 0.1 2 30 20 0.7736645
## 0.1 2 30 25 0.7792722
## 0.1 2 30 30 0.7789956
## 0.1 2 30 35 0.7795838
## 0.1 2 30 40 0.7810588
## 0.1 2 30 45 0.7810566
## 0.1 2 30 50 0.7831285
## 0.1 3 10 5 0.7831199
## 0.1 3 10 10 0.7825185
## 0.1 3 10 15 0.7914010
## 0.1 3 10 20 0.8044230
## 0.1 3 10 25 0.8035559
## 0.1 3 10 30 0.8032486
## 0.1 3 10 35 0.8020743
## 0.1 3 10 40 0.8035428
## 0.1 3 10 45 0.8008849
## 0.1 3 10 50 0.8020810
## 0.1 3 20 5 0.7825295
## 0.1 3 20 10 0.7822419
## 0.1 3 20 15 0.7819413
## 0.1 3 20 20 0.7878607
## 0.1 3 20 25 0.7979393
## 0.1 3 20 30 0.8005974
## 0.1 3 20 35 0.8011944
## 0.1 3 20 40 0.8020920
## 0.1 3 20 45 0.8020833
## 0.1 3 20 50 0.8038589
## 0.1 3 30 5 0.7825295
## 0.1 3 30 10 0.7825295
## 0.1 3 30 15 0.7774990
## 0.1 3 30 20 0.7801504
## 0.1 3 30 25 0.7810546
## 0.1 3 30 30 0.7795731
## 0.1 3 30 35 0.7807539
## 0.1 3 30 40 0.7837169
## 0.1 3 30 45 0.7852071
## 0.1 3 30 50 0.7890372
## 0.1 4 10 5 0.7813486
## 0.1 4 10 10 0.7917169
## 0.1 4 10 15 0.7985209
## 0.1 4 10 20 0.8071422
## 0.1 4 10 25 0.8044734
## 0.1 4 10 30 0.8062316
## 0.1 4 10 35 0.8065345
## 0.1 4 10 40 0.8065212
## 0.1 4 10 45 0.8038655
## 0.1 4 10 50 0.8059221
## 0.1 4 20 5 0.7825295
## 0.1 4 20 10 0.7816428
## 0.1 4 20 15 0.7858085
## 0.1 4 20 20 0.7869740
## 0.1 4 20 25 0.7970285
## 0.1 4 20 30 0.8017868
## 0.1 4 20 35 0.8035449
## 0.1 4 20 40 0.8020744
## 0.1 4 20 45 0.8079918
## 0.1 4 20 50 0.8103557
## 0.1 4 30 5 0.7825295
## 0.1 4 30 10 0.7822332
## 0.1 4 30 15 0.7816514
## 0.1 4 30 20 0.7801612
## 0.1 4 30 25 0.7825273
## 0.1 4 30 30 0.7860895
## 0.1 4 30 35 0.7869653
## 0.1 4 30 40 0.7922943
## 0.1 4 30 45 0.7943553
## 0.1 4 30 50 0.7964272
## 0.1 5 10 5 0.7875577
## 0.1 5 10 10 0.7946581
## 0.1 5 10 15 0.8012008
## 0.1 5 10 20 0.8044515
## 0.1 5 10 25 0.8059307
## 0.1 5 10 30 0.8032662
## 0.1 5 10 35 0.8038786
## 0.1 5 10 40 0.8071248
## 0.1 5 10 45 0.8092033
## 0.1 5 10 50 0.8080049
## 0.1 5 20 5 0.7816406
## 0.1 5 20 10 0.7792768
## 0.1 5 20 15 0.7848955
## 0.1 5 20 20 0.7899151
## 0.1 5 20 25 0.7955340
## 0.1 5 20 30 0.7982204
## 0.1 5 20 35 0.7988174
## 0.1 5 20 40 0.8020636
## 0.1 5 20 45 0.8062206
## 0.1 5 20 50 0.8053207
## 0.1 5 30 5 0.7825295
## 0.1 5 30 10 0.7819369
## 0.1 5 30 15 0.7786644
## 0.1 5 30 20 0.7840219
## 0.1 5 30 25 0.7857954
## 0.1 5 30 30 0.7902137
## 0.1 5 30 35 0.7943508
## 0.1 5 30 40 0.7958390
## 0.1 5 30 45 0.7958390
## 0.1 5 30 50 0.7970308
## 0.2 1 10 5 0.7689259
## 0.2 1 10 10 0.7680370
## 0.2 1 10 15 0.7724707
## 0.2 1 10 20 0.7774990
## 0.2 1 10 25 0.7840023
## 0.2 1 10 30 0.7890284
## 0.2 1 10 35 0.7928715
## 0.2 1 10 40 0.7896188
## 0.2 1 10 45 0.7914054
## 0.2 1 10 50 0.7878476
## 0.2 1 20 5 0.7692200
## 0.2 1 20 10 0.7689237
## 0.2 1 20 15 0.7721917
## 0.2 1 20 20 0.7739607
## 0.2 1 20 25 0.7798760
## 0.2 1 20 30 0.7837147
## 0.2 1 20 35 0.7842984
## 0.2 1 20 40 0.7905186
## 0.2 1 20 45 0.7917082
## 0.2 1 20 50 0.7896364
## 0.2 1 30 5 0.7692200
## 0.2 1 30 10 0.7689259
## 0.2 1 30 15 0.7674225
## 0.2 1 30 20 0.7677363
## 0.2 1 30 25 0.7706753
## 0.2 1 30 30 0.7792703
## 0.2 1 30 35 0.7825252
## 0.2 1 30 40 0.7807560
## 0.2 1 30 45 0.7846037
## 0.2 1 30 50 0.7840132
## 0.2 2 10 5 0.7748410
## 0.2 2 10 10 0.7932160
## 0.2 2 10 15 0.8003097
## 0.2 2 10 20 0.8008892
## 0.2 2 10 25 0.7994076
## 0.2 2 10 30 0.7997258
## 0.2 2 10 35 0.8023905
## 0.2 2 10 40 0.8009047
## 0.2 2 10 45 0.7997281
## 0.2 2 10 50 0.7988458
## 0.2 2 20 5 0.7742417
## 0.2 2 20 10 0.7810325
## 0.2 2 20 15 0.7946603
## 0.2 2 20 20 0.7996799
## 0.2 2 20 25 0.8005688
## 0.2 2 20 30 0.7996844
## 0.2 2 20 35 0.7988020
## 0.2 2 20 40 0.8017803
## 0.2 2 20 45 0.8020701
## 0.2 2 20 50 0.8065299
## 0.2 2 30 5 0.7760043
## 0.2 2 30 10 0.7721699
## 0.2 2 30 15 0.7801503
## 0.2 2 30 20 0.7807429
## 0.2 2 30 25 0.7816318
## 0.2 2 30 30 0.7851919
## 0.2 2 30 35 0.7881506
## 0.2 2 30 40 0.7905165
## 0.2 2 30 45 0.7934730
## 0.2 2 30 50 0.7964360
## 0.2 3 10 5 0.7763180
## 0.2 3 10 10 0.7955580
## 0.2 3 10 15 0.8017782
## 0.2 3 10 20 0.8003165
## 0.2 3 10 25 0.8059462
## 0.2 3 10 30 0.8062512
## 0.2 3 10 35 0.8097696
## 0.2 3 10 40 0.8118678
## 0.2 3 10 45 0.8080246
## 0.2 3 10 50 0.8080113
## 0.2 3 20 5 0.7795665
## 0.2 3 20 10 0.7893313
## 0.2 3 20 15 0.7996866
## 0.2 3 20 20 0.7970154
## 0.2 3 20 25 0.7990808
## 0.2 3 20 30 0.8052988
## 0.2 3 20 35 0.8029306
## 0.2 3 20 40 0.8103381
## 0.2 3 20 45 0.8068087
## 0.2 3 20 50 0.8076801
## 0.2 3 30 5 0.7792658
## 0.2 3 30 10 0.7748060
## 0.2 3 30 15 0.7828214
## 0.2 3 30 20 0.7837169
## 0.2 3 30 25 0.7913944
## 0.2 3 30 30 0.7925995
## 0.2 3 30 35 0.7996933
## 0.2 3 30 40 0.7997019
## 0.2 3 30 45 0.8044494
## 0.2 3 30 50 0.8050310
## 0.2 4 10 5 0.7908192
## 0.2 4 10 10 0.8017716
## 0.2 4 10 15 0.8044579
## 0.2 4 10 20 0.8056322
## 0.2 4 10 25 0.8106780
## 0.2 4 10 30 0.8118502
## 0.2 4 10 35 0.8068175
## 0.2 4 10 40 0.8088872
## 0.2 4 10 45 0.8103644
## 0.2 4 10 50 0.8050376
## 0.2 4 20 5 0.7831286
## 0.2 4 20 10 0.7896561
## 0.2 4 20 15 0.7943838
## 0.2 4 20 20 0.8005908
## 0.2 4 20 25 0.8009111
## 0.2 4 20 30 0.8017782
## 0.2 4 20 35 0.8059352
## 0.2 4 20 40 0.8062075
## 0.2 4 20 45 0.8068001
## 0.2 4 20 50 0.8062097
## 0.2 4 30 5 0.7822332
## 0.2 4 30 10 0.7745448
## 0.2 4 30 15 0.7858064
## 0.2 4 30 20 0.7878696
## 0.2 4 30 25 0.7952508
## 0.2 4 30 30 0.8005646
## 0.2 4 30 35 0.7967280
## 0.2 4 30 40 0.7988043
## 0.2 4 30 45 0.8044122
## 0.2 4 30 50 0.7996844
## 0.2 5 10 5 0.7952419
## 0.2 5 10 10 0.7964493
## 0.2 5 10 15 0.7976344
## 0.2 5 10 20 0.7985101
## 0.2 5 10 25 0.8038567
## 0.2 5 10 30 0.8065278
## 0.2 5 10 35 0.8071096
## 0.2 5 10 40 0.8062161
## 0.2 5 10 45 0.8026781
## 0.2 5 10 50 0.7988219
## 0.2 5 20 5 0.7789585
## 0.2 5 20 10 0.7878563
## 0.2 5 20 15 0.7955602
## 0.2 5 20 20 0.8026452
## 0.2 5 20 25 0.8091770
## 0.2 5 20 30 0.8068021
## 0.2 5 20 35 0.8038479
## 0.2 5 20 40 0.8047390
## 0.2 5 20 45 0.7997238
## 0.2 5 20 50 0.8012032
## 0.2 5 30 5 0.7775077
## 0.2 5 30 10 0.7760349
## 0.2 5 30 15 0.7863968
## 0.2 5 30 20 0.7908106
## 0.2 5 30 25 0.7967478
## 0.2 5 30 30 0.8032314
## 0.2 5 30 35 0.8017629
## 0.2 5 30 40 0.8035385
## 0.2 5 30 45 0.8044319
## 0.2 5 30 50 0.8079656
## 0.3 1 10 5 0.7695163
## 0.3 1 10 10 0.7736666
## 0.3 1 10 15 0.7751416
## 0.3 1 10 20 0.7854707
## 0.3 1 10 25 0.7908084
## 0.3 1 10 30 0.7914011
## 0.3 1 10 35 0.7890328
## 0.3 1 10 40 0.7902202
## 0.3 1 10 45 0.7896167
## 0.3 1 10 50 0.7878432
## 0.3 1 20 5 0.7692090
## 0.3 1 20 10 0.7650652
## 0.3 1 20 15 0.7727472
## 0.3 1 20 20 0.7795644
## 0.3 1 20 25 0.7878521
## 0.3 1 20 30 0.7884425
## 0.3 1 20 35 0.7869675
## 0.3 1 20 40 0.7851831
## 0.3 1 20 45 0.7857846
## 0.3 1 20 50 0.7881680
## 0.3 1 30 5 0.7692200
## 0.3 1 30 10 0.7656535
## 0.3 1 30 15 0.7742330
## 0.3 1 30 20 0.7780894
## 0.3 1 30 25 0.7795577
## 0.3 1 30 30 0.7769018
## 0.3 1 30 35 0.7828148
## 0.3 1 30 40 0.7825032
## 0.3 1 30 45 0.7845991
## 0.3 1 30 50 0.7834074
## 0.3 2 10 5 0.7816580
## 0.3 2 10 10 0.7991005
## 0.3 2 10 15 0.8044229
## 0.3 2 10 20 0.8005843
## 0.3 2 10 25 0.7988195
## 0.3 2 10 30 0.7952791
## 0.3 2 10 35 0.7973424
## 0.3 2 10 40 0.7999914
## 0.3 2 10 45 0.8026911
## 0.3 2 10 50 0.8023731
## 0.3 2 20 5 0.7780937
## 0.3 2 20 10 0.7905384
## 0.3 2 20 15 0.7952530
## 0.3 2 20 20 0.7973336
## 0.3 2 20 25 0.7976342
## 0.3 2 20 30 0.7979306
## 0.3 2 20 35 0.7982467
## 0.3 2 20 40 0.8012031
## 0.3 2 20 45 0.8050594
## 0.3 2 20 50 0.8038654
## 0.3 2 30 5 0.7748541
## 0.3 2 30 10 0.7795599
## 0.3 2 30 15 0.7828257
## 0.3 2 30 20 0.7831309
## 0.3 2 30 25 0.7825208
## 0.3 2 30 30 0.7801746
## 0.3 2 30 35 0.7884556
## 0.3 2 30 40 0.7937956
## 0.3 2 30 45 0.7961464
## 0.3 2 30 50 0.7928826
## 0.3 3 10 5 0.7849019
## 0.3 3 10 10 0.7958740
## 0.3 3 10 15 0.8000047
## 0.3 3 10 20 0.7985342
## 0.3 3 10 25 0.8020897
## 0.3 3 10 30 0.8014949
## 0.3 3 10 35 0.7964666
## 0.3 3 10 40 0.7976585
## 0.3 3 10 45 0.7967499
## 0.3 3 10 50 0.8009069
## 0.3 3 20 5 0.7763357
## 0.3 3 20 10 0.7929307
## 0.3 3 20 15 0.7967650
## 0.3 3 20 20 0.7988327
## 0.3 3 20 25 0.8044471
## 0.3 3 20 30 0.8047653
## 0.3 3 20 35 0.8053470
## 0.3 3 20 40 0.8032905
## 0.3 3 20 45 0.8050551
## 0.3 3 20 50 0.8106761
## 0.3 3 30 5 0.7807297
## 0.3 3 30 10 0.7822068
## 0.3 3 30 15 0.7925644
## 0.3 3 30 20 0.7916886
## 0.3 3 30 25 0.7991071
## 0.3 3 30 30 0.7943685
## 0.3 3 30 35 0.7979327
## 0.3 3 30 40 0.8023707
## 0.3 3 30 45 0.8014929
## 0.3 3 30 50 0.8097673
## 0.3 4 10 5 0.7893665
## 0.3 4 10 10 0.7976364
## 0.3 4 10 15 0.8014905
## 0.3 4 10 20 0.8000308
## 0.3 4 10 25 0.8020721
## 0.3 4 10 30 0.8082728
## 0.3 4 10 35 0.8017737
## 0.3 4 10 40 0.8011746
## 0.3 4 10 45 0.8008828
## 0.3 4 10 50 0.7952618
## 0.3 4 20 5 0.7766188
## 0.3 4 20 10 0.7988065
## 0.3 4 20 15 0.8020615
## 0.3 4 20 20 0.8032511
## 0.3 4 20 25 0.8059155
## 0.3 4 20 30 0.8020899
## 0.3 4 20 35 0.8044559
## 0.3 4 20 40 0.8050464
## 0.3 4 20 45 0.8023709
## 0.3 4 20 50 0.8047238
## 0.3 4 30 5 0.7807692
## 0.3 4 30 10 0.7807429
## 0.3 4 30 15 0.7908083
## 0.3 4 30 20 0.8002791
## 0.3 4 30 25 0.8023686
## 0.3 4 30 30 0.7958411
## 0.3 4 30 35 0.8058979
## 0.3 4 30 40 0.8064971
## 0.3 4 30 45 0.8076889
## 0.3 4 30 50 0.8064886
## 0.3 5 10 5 0.7949631
## 0.3 5 10 10 0.7976430
## 0.3 5 10 15 0.8032662
## 0.3 5 10 20 0.8029721
## 0.3 5 10 25 0.8029807
## 0.3 5 10 30 0.8009067
## 0.3 5 10 35 0.8002967
## 0.3 5 10 40 0.8014687
## 0.3 5 10 45 0.8047368
## 0.3 5 10 50 0.7979131
## 0.3 5 20 5 0.7858084
## 0.3 5 20 10 0.8000134
## 0.3 5 20 15 0.7987889
## 0.3 5 20 20 0.8032772
## 0.3 5 20 25 0.8041531
## 0.3 5 20 30 0.8047390
## 0.3 5 20 35 0.8011878
## 0.3 5 20 40 0.8044580
## 0.3 5 20 45 0.8068241
## 0.3 5 20 50 0.8029437
## 0.3 5 30 5 0.7813617
## 0.3 5 30 10 0.7795929
## 0.3 5 30 15 0.7929002
## 0.3 5 30 20 0.7955537
## 0.3 5 30 25 0.7976235
## 0.3 5 30 30 0.8032576
## 0.3 5 30 35 0.8062118
## 0.3 5 30 40 0.8065016
## 0.3 5 30 45 0.8005908
## 0.3 5 30 50 0.7996977
## Kappa Accuracy SD Kappa SD
## 0.5183773 0.03393878 0.07226845
## 0.5183773 0.03393878 0.07226845
## 0.5183773 0.03393878 0.07226845
## 0.5183773 0.03393878 0.07226845
## 0.5191203 0.03234106 0.06854046
## 0.5197694 0.03238810 0.06812746
## 0.5292037 0.03001320 0.06363436
## 0.5266549 0.03229885 0.06775505
## 0.5357588 0.03152644 0.06658660
## 0.5479427 0.03267741 0.06863636
## 0.5129120 0.03308138 0.07087347
## 0.5183773 0.03393878 0.07226845
## 0.5183773 0.03393878 0.07226845
## 0.5183773 0.03393878 0.07226845
## 0.5125371 0.03207206 0.06827715
## 0.5217805 0.03423990 0.07265939
## 0.5227742 0.03461667 0.07339528
## 0.5187636 0.03172253 0.06657126
## 0.5219998 0.03306395 0.06978850
## 0.5388186 0.03601169 0.07497780
## 0.5129120 0.03308138 0.07087347
## 0.5183773 0.03393878 0.07226845
## 0.5183773 0.03393878 0.07226845
## 0.5183773 0.03393878 0.07226845
## 0.5187623 0.03328308 0.07037158
## 0.5167253 0.03265732 0.06875086
## 0.5073610 0.03348434 0.07141457
## 0.5043920 0.03297063 0.07052438
## 0.5098637 0.03893390 0.08331708
## 0.5193747 0.03390441 0.07247806
## 0.5210296 0.03281182 0.07589674
## 0.5196404 0.03272431 0.07331539
## 0.5295893 0.03507608 0.07675815
## 0.5550468 0.03064376 0.06981859
## 0.5635714 0.03321750 0.07440048
## 0.5688584 0.03679681 0.08065808
## 0.5697985 0.03534985 0.07761932
## 0.5737398 0.03632389 0.07937564
## 0.5730305 0.03613155 0.07896953
## 0.5764080 0.04051097 0.08724491
## 0.5210296 0.03281182 0.07589674
## 0.5121614 0.03047265 0.07121906
## 0.5212090 0.03068615 0.07152340
## 0.5319495 0.03068007 0.06994657
## 0.5346366 0.03246633 0.07218970
## 0.5489193 0.03396812 0.07489160
## 0.5511353 0.03399065 0.07388498
## 0.5594119 0.03750440 0.08189125
## 0.5680006 0.03801944 0.08377953
## 0.5694889 0.03890836 0.08560819
## 0.5210296 0.03281182 0.07589674
## 0.5143389 0.03198798 0.07338106
## 0.5044280 0.03512856 0.07966227
## 0.5105136 0.03012137 0.06749083
## 0.5232800 0.03980970 0.08676608
## 0.5238435 0.03922048 0.08605167
## 0.5272517 0.03680009 0.08102951
## 0.5307358 0.03935275 0.08728833
## 0.5313922 0.04128408 0.09117526
## 0.5368732 0.04133045 0.09132017
## 0.5236322 0.03238332 0.07452237
## 0.5276644 0.03490517 0.07812823
## 0.5509909 0.03218069 0.07247623
## 0.5795665 0.03329984 0.07493349
## 0.5795425 0.03319484 0.07378773
## 0.5797838 0.03769627 0.08314820
## 0.5788753 0.03905971 0.08568866
## 0.5825189 0.03668385 0.08062980
## 0.5778401 0.04044120 0.08778295
## 0.5799190 0.04021630 0.08779683
## 0.5210296 0.03281182 0.07589674
## 0.5217859 0.03379657 0.07864503
## 0.5264079 0.03404875 0.07724029
## 0.5446845 0.03824793 0.08301641
## 0.5680657 0.03954969 0.08666314
## 0.5746614 0.03586512 0.07915909
## 0.5754097 0.03432135 0.07664862
## 0.5791768 0.03658954 0.08028893
## 0.5802257 0.03853887 0.08369594
## 0.5842352 0.03969796 0.08619462
## 0.5210296 0.03281182 0.07589674
## 0.5211282 0.03281182 0.07590869
## 0.5143273 0.03586893 0.08118366
## 0.5237615 0.03355769 0.07519913
## 0.5276281 0.03688102 0.08153954
## 0.5263837 0.03831999 0.08439204
## 0.5317599 0.03688078 0.08101987
## 0.5389632 0.03608005 0.07966941
## 0.5425207 0.03653517 0.08058609
## 0.5514920 0.03306540 0.07272810
## 0.5204990 0.03442250 0.07799758
## 0.5482350 0.03152948 0.07239077
## 0.5661017 0.03355598 0.07590924
## 0.5871777 0.03965501 0.08756962
## 0.5829752 0.04061988 0.08861809
## 0.5874944 0.03543665 0.07770856
## 0.5899804 0.04210912 0.09211458
## 0.5903280 0.04254705 0.09274102
## 0.5855795 0.04364052 0.09388850
## 0.5896841 0.04181467 0.09022895
## 0.5210296 0.03281182 0.07589674
## 0.5228649 0.03344102 0.07686302
## 0.5356187 0.03448879 0.07813136
## 0.5423391 0.04277478 0.09388909
## 0.5654346 0.04031012 0.08767534
## 0.5771612 0.04193320 0.09217213
## 0.5822795 0.04398321 0.09571264
## 0.5804765 0.04081500 0.08854718
## 0.5937126 0.04037305 0.08676155
## 0.5999357 0.04228673 0.09031525
## 0.5210296 0.03281182 0.07589674
## 0.5205376 0.03190186 0.07440786
## 0.5228057 0.03284034 0.07551231
## 0.5242798 0.03508364 0.07893724
## 0.5332382 0.03613066 0.08126921
## 0.5424308 0.03676733 0.08159230
## 0.5454652 0.03534139 0.07856561
## 0.5573948 0.03927584 0.08663164
## 0.5633977 0.03932614 0.08591894
## 0.5687352 0.04151075 0.09032322
## 0.5366426 0.03717174 0.08339964
## 0.5591298 0.03359876 0.07410411
## 0.5750715 0.03517323 0.07747841
## 0.5837964 0.03636965 0.07896338
## 0.5880846 0.03627035 0.07910978
## 0.5827740 0.03840365 0.08362873
## 0.5852619 0.03807153 0.08344146
## 0.5931229 0.03613146 0.07719163
## 0.5983229 0.03586319 0.07633857
## 0.5960238 0.03814359 0.08171305
## 0.5195215 0.03365416 0.07764080
## 0.5177603 0.03033465 0.07016951
## 0.5349548 0.03418802 0.07697207
## 0.5495220 0.03651616 0.08086185
## 0.5639333 0.04100075 0.09030176
## 0.5710154 0.03734297 0.08234066
## 0.5737560 0.03723285 0.08191723
## 0.5814023 0.04163711 0.09149771
## 0.5914669 0.03826183 0.08343730
## 0.5896858 0.03915350 0.08469542
## 0.5210296 0.03281182 0.07589674
## 0.5201440 0.03261634 0.07587571
## 0.5166205 0.03404089 0.07970591
## 0.5329872 0.03647345 0.08310136
## 0.5389088 0.03407723 0.07744444
## 0.5515756 0.03661211 0.08031136
## 0.5621379 0.04050150 0.08820487
## 0.5662897 0.03834789 0.08338702
## 0.5675135 0.04088093 0.08815303
## 0.5708063 0.04090293 0.08772237
## 0.5169710 0.03374533 0.07158638
## 0.5162331 0.03269375 0.06943390
## 0.5245889 0.03070576 0.06715717
## 0.5355876 0.03486774 0.07256399
## 0.5492955 0.03216373 0.06772441
## 0.5608190 0.03440854 0.07130463
## 0.5691649 0.03315484 0.06908724
## 0.5625317 0.03483139 0.07193866
## 0.5660156 0.03527126 0.07450274
## 0.5586791 0.03683458 0.07672224
## 0.5172302 0.03393878 0.07222091
## 0.5169430 0.03409502 0.07211000
## 0.5240244 0.03557313 0.07488809
## 0.5280154 0.03471953 0.07364855
## 0.5404931 0.03558414 0.07557249
## 0.5481914 0.03740993 0.08046742
## 0.5509045 0.03610213 0.07657604
## 0.5635013 0.03199403 0.06629718
## 0.5661979 0.03208350 0.06716770
## 0.5619031 0.02674801 0.05608425
## 0.5183773 0.03393878 0.07226845
## 0.5169710 0.03374533 0.07158638
## 0.5125321 0.03600090 0.07978609
## 0.5147364 0.03493376 0.07436015
## 0.5207422 0.03795410 0.07978023
## 0.5389725 0.03502212 0.07350799
## 0.5466634 0.03728317 0.07734130
## 0.5424862 0.03634710 0.07594400
## 0.5499888 0.03600662 0.07591392
## 0.5487772 0.03527150 0.07420066
## 0.5151364 0.03460563 0.07336020
## 0.5538143 0.03712949 0.08119513
## 0.5724385 0.03689205 0.08133921
## 0.5773178 0.03612364 0.07962547
## 0.5740210 0.03918786 0.08629756
## 0.5759474 0.03987098 0.08617559
## 0.5822817 0.03960086 0.08590261
## 0.5804765 0.03985016 0.08644292
## 0.5782216 0.04412113 0.09537168
## 0.5765517 0.04134973 0.08918746
## 0.5098323 0.03443781 0.07665337
## 0.5288048 0.03996181 0.08776722
## 0.5594083 0.03570276 0.07947883
## 0.5724996 0.03780955 0.08337814
## 0.5752432 0.03558608 0.07731139
## 0.5751618 0.03482490 0.07598149
## 0.5731883 0.03248281 0.07174552
## 0.5807215 0.03461498 0.07485997
## 0.5818974 0.03608299 0.07755620
## 0.5916328 0.03971055 0.08536831
## 0.5120512 0.03311360 0.07302380
## 0.5114586 0.03571320 0.07688703
## 0.5284180 0.03577555 0.07747355
## 0.5319119 0.03714831 0.08035902
## 0.5360551 0.03769851 0.08012579
## 0.5449478 0.03594037 0.07748335
## 0.5513411 0.03999050 0.08638223
## 0.5563151 0.03910416 0.08460597
## 0.5624419 0.04117473 0.08954341
## 0.5694958 0.03881296 0.08388195
## 0.5177990 0.03421471 0.07375515
## 0.5640656 0.03798410 0.08092939
## 0.5798566 0.03466880 0.07506243
## 0.5782000 0.04111478 0.08872517
## 0.5901665 0.04483206 0.09609667
## 0.5916416 0.03778544 0.08048460
## 0.5993856 0.03511544 0.07528379
## 0.6043174 0.03860621 0.08277416
## 0.5961063 0.04054591 0.08697952
## 0.5963522 0.03387993 0.07235589
## 0.5176038 0.03325828 0.07688195
## 0.5465009 0.03936737 0.08729711
## 0.5715992 0.04143881 0.09038113
## 0.5687730 0.03515260 0.07676421
## 0.5754229 0.03956888 0.08532286
## 0.5890040 0.03771307 0.08116247
## 0.5846145 0.03944418 0.08384134
## 0.6002162 0.04087528 0.08719130
## 0.5934902 0.04227046 0.09028416
## 0.5948964 0.04315231 0.09226684
## 0.5152369 0.03341526 0.07674052
## 0.5134326 0.03320953 0.07510068
## 0.5335568 0.02926585 0.06764545
## 0.5382668 0.03257208 0.07217027
## 0.5566589 0.03333004 0.07278096
## 0.5602561 0.03725714 0.08064917
## 0.5761592 0.03810637 0.08299231
## 0.5772956 0.04030138 0.08668568
## 0.5876205 0.03888818 0.08439298
## 0.5890094 0.03961574 0.08577761
## 0.5509863 0.03992643 0.08878486
## 0.5780249 0.03826006 0.08197373
## 0.5858111 0.03967289 0.08394771
## 0.5901439 0.03660329 0.07725096
## 0.6021863 0.03712531 0.07782299
## 0.6036144 0.03781755 0.08098577
## 0.5937869 0.03519088 0.07487157
## 0.5982164 0.04154017 0.08860018
## 0.6019594 0.04097429 0.08774003
## 0.5911334 0.03944279 0.08385222
## 0.5254849 0.03347740 0.07709293
## 0.5468191 0.03478796 0.07894762
## 0.5635063 0.03358091 0.07253656
## 0.5781834 0.03480269 0.07536725
## 0.5802188 0.03902883 0.08371333
## 0.5824465 0.03997173 0.08605627
## 0.5911436 0.03767634 0.08094850
## 0.5918902 0.03724839 0.07964735
## 0.5937591 0.03639199 0.07726405
## 0.5931066 0.03680520 0.07810488
## 0.5220965 0.03391652 0.07865779
## 0.5144430 0.03136557 0.07135533
## 0.5430473 0.03650011 0.08003451
## 0.5497814 0.03965942 0.08609346
## 0.5663920 0.04107202 0.08783240
## 0.5794407 0.03665504 0.07848265
## 0.5716394 0.03972462 0.08530502
## 0.5758561 0.03762483 0.08120198
## 0.5883612 0.03199938 0.06844049
## 0.5784763 0.03602444 0.07732959
## 0.5576538 0.03473094 0.08018002
## 0.5679164 0.03516879 0.07750935
## 0.5721984 0.04283439 0.09362600
## 0.5764187 0.04339775 0.09237538
## 0.5866252 0.04216237 0.09185605
## 0.5931450 0.04302776 0.09235933
## 0.5954580 0.03768359 0.08123017
## 0.5939111 0.03249291 0.07100813
## 0.5866180 0.03705052 0.07874142
## 0.5792904 0.03433515 0.07412373
## 0.5193730 0.03584080 0.08152691
## 0.5450906 0.04064278 0.08958551
## 0.5672573 0.03953345 0.08561184
## 0.5823305 0.03916616 0.08467624
## 0.5976332 0.03794928 0.08048823
## 0.5934562 0.03748358 0.07956714
## 0.5879086 0.03700696 0.07830587
## 0.5895409 0.03621346 0.07695299
## 0.5791640 0.03237916 0.06950094
## 0.5826546 0.03544921 0.07528752
## 0.5126078 0.03327680 0.07654340
## 0.5188632 0.03681436 0.08227781
## 0.5447337 0.04201053 0.09206103
## 0.5553937 0.03926278 0.08654355
## 0.5701376 0.04494570 0.09631607
## 0.5844844 0.04114056 0.08873242
## 0.5819877 0.04512505 0.09740622
## 0.5857281 0.04002385 0.08673185
## 0.5879042 0.04219032 0.09091708
## 0.5955157 0.03835778 0.08319063
## 0.5173230 0.03510666 0.07406089
## 0.5263093 0.03661521 0.07845586
## 0.5302914 0.03918775 0.08245652
## 0.5533769 0.04193622 0.08725709
## 0.5637770 0.04206413 0.08659977
## 0.5652285 0.04068528 0.08564903
## 0.5611792 0.04267703 0.08903293
## 0.5624020 0.04427843 0.09367776
## 0.5616826 0.04202142 0.09026935
## 0.5578672 0.03872668 0.08099412
## 0.5163474 0.03874189 0.08142296
## 0.5083774 0.03806797 0.07969086
## 0.5250499 0.03440642 0.07345228
## 0.5411946 0.03322540 0.06947629
## 0.5582229 0.03128788 0.06635300
## 0.5590008 0.03705383 0.07747178
## 0.5561777 0.03354009 0.06951671
## 0.5528666 0.03321974 0.06868763
## 0.5527689 0.03429193 0.07161825
## 0.5588035 0.03654684 0.07654890
## 0.5183773 0.03393878 0.07226845
## 0.5066314 0.03303584 0.07170006
## 0.5280664 0.04124586 0.08485467
## 0.5362309 0.03694530 0.07928800
## 0.5393689 0.03855971 0.08176881
## 0.5327965 0.03929538 0.08327213
## 0.5458970 0.03722415 0.07779203
## 0.5461336 0.03982100 0.08489335
## 0.5497313 0.03813788 0.08033136
## 0.5468864 0.03766296 0.07937718
## 0.5325504 0.04017198 0.08873467
## 0.5698391 0.03441056 0.07765143
## 0.5841508 0.03645767 0.07969209
## 0.5783758 0.03929615 0.08568416
## 0.5753212 0.03704756 0.08080705
## 0.5687679 0.04008192 0.08537032
## 0.5733113 0.04185164 0.09130995
## 0.5791768 0.03558646 0.07655883
## 0.5857847 0.03439634 0.07326127
## 0.5854023 0.03541230 0.07490209
## 0.5197226 0.03122924 0.07071840
## 0.5506291 0.03480891 0.07801667
## 0.5654541 0.03719945 0.08181575
## 0.5706060 0.03744835 0.08234192
## 0.5714652 0.04449873 0.09559615
## 0.5728319 0.04073704 0.08825683
## 0.5740299 0.04263504 0.09236888
## 0.5810442 0.03913269 0.08428265
## 0.5896073 0.03992316 0.08592560
## 0.5873714 0.04108527 0.08779392
## 0.5137135 0.03146380 0.07129480
## 0.5277143 0.03833634 0.08308231
## 0.5365923 0.03917834 0.08486242
## 0.5384600 0.04131852 0.08942134
## 0.5384556 0.03503511 0.07625280
## 0.5347983 0.04224760 0.09165276
## 0.5520973 0.04081726 0.08841518
## 0.5643723 0.03757131 0.08082400
## 0.5697999 0.04217028 0.09021698
## 0.5637296 0.04227767 0.09004172
## 0.5375323 0.03410659 0.07570885
## 0.5667587 0.04158580 0.08918644
## 0.5782773 0.03389681 0.07285345
## 0.5757450 0.03953227 0.08546941
## 0.5841031 0.03632392 0.07881900
## 0.5831806 0.03550018 0.07566367
## 0.5731353 0.03546272 0.07551706
## 0.5765515 0.03365441 0.07061175
## 0.5748990 0.03368857 0.07063089
## 0.5836300 0.03780360 0.07907016
## 0.5176877 0.03303860 0.07474052
## 0.5594043 0.03374972 0.07443424
## 0.5700692 0.04089944 0.08831211
## 0.5751357 0.03777140 0.08125848
## 0.5872933 0.03807141 0.08222702
## 0.5889305 0.04001726 0.08603474
## 0.5903879 0.03964014 0.08422658
## 0.5868624 0.04026519 0.08596284
## 0.5900932 0.04228541 0.09058700
## 0.6014563 0.04352503 0.09410263
## 0.5238863 0.03568051 0.08196458
## 0.5352401 0.03989012 0.08855209
## 0.5584502 0.04350113 0.09614454
## 0.5597404 0.04374462 0.09481454
## 0.5756727 0.03775187 0.08119077
## 0.5666469 0.04125414 0.08845772
## 0.5743438 0.04110343 0.08856086
## 0.5847041 0.03663583 0.07786538
## 0.5827792 0.03563940 0.07715386
## 0.6003405 0.03701778 0.07932249
## 0.5504019 0.03053928 0.06766362
## 0.5718700 0.04151282 0.08999472
## 0.5822923 0.04045436 0.08661237
## 0.5800868 0.04193850 0.08931330
## 0.5850621 0.03625584 0.07751109
## 0.5980514 0.03152014 0.06747862
## 0.5844063 0.03626072 0.07741476
## 0.5843031 0.03365481 0.07117046
## 0.5841121 0.03763028 0.08003716
## 0.5721526 0.03905111 0.08237885
## 0.5200005 0.03691389 0.08206691
## 0.5741727 0.03541807 0.07702171
## 0.5832893 0.03726587 0.08043449
## 0.5864972 0.03627496 0.07876949
## 0.5915313 0.03397767 0.07338427
## 0.5853462 0.03308261 0.07077401
## 0.5905677 0.03408001 0.07216388
## 0.5918596 0.03413789 0.07254356
## 0.5860294 0.03682606 0.07881973
## 0.5910720 0.03555247 0.07517700
## 0.5269860 0.03579981 0.07883735
## 0.5330094 0.03061641 0.06687986
## 0.5569869 0.03024174 0.06575329
## 0.5787363 0.03433392 0.07473972
## 0.5845707 0.03280745 0.07011649
## 0.5711026 0.02985536 0.06347885
## 0.5926910 0.03196354 0.06819938
## 0.5946017 0.03117025 0.06646160
## 0.5969273 0.03327437 0.07167926
## 0.5944426 0.03764294 0.08125867
## 0.5642591 0.03509784 0.07621106
## 0.5726951 0.03874634 0.08313882
## 0.5867072 0.04308909 0.08995230
## 0.5873029 0.04068917 0.08586542
## 0.5867448 0.04116462 0.08711239
## 0.5834792 0.03637273 0.07761817
## 0.5817994 0.03480958 0.07324626
## 0.5850144 0.03450280 0.07208284
## 0.5926059 0.03896494 0.08162206
## 0.5786861 0.03940778 0.08163989
## 0.5385558 0.03535088 0.08200617
## 0.5769759 0.03544124 0.07719127
## 0.5771997 0.03541039 0.07539439
## 0.5869317 0.04212114 0.09025827
## 0.5905111 0.03548789 0.07552194
## 0.5917737 0.03609223 0.07706930
## 0.5841522 0.04008411 0.08544700
## 0.5910386 0.03791604 0.08045472
## 0.5962463 0.03756961 0.08026425
## 0.5883126 0.03541171 0.07591678
## 0.5272089 0.03199805 0.07369057
## 0.5314608 0.03926174 0.08501290
## 0.5629267 0.03993214 0.08546334
## 0.5695637 0.03926128 0.08264877
## 0.5733012 0.03699451 0.07883112
## 0.5853242 0.03761768 0.08007517
## 0.5917886 0.03665754 0.07782738
## 0.5937817 0.03653243 0.07764409
## 0.5815142 0.04189606 0.08997057
## 0.5805268 0.03831341 0.08099892
##
## Accuracy was used to select the optimal model using the largest value.
## The final values used for the model were n.trees = 40, interaction.depth
## = 3, shrinkage = 0.2 and n.minobsinnode = 10.
##gbmFit fits best
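The claim that gbmFit fits best could be checked directly with caret's resamples() interface, which lines up the cross-validation results of several train() objects. A minimal sketch, assuming the earlier tree and random-forest fits were stored under the hypothetical names rpartFit and rfFit and were trained with the same trControl:
## Sketch only: rpartFit and rfFit are hypothetical names for the earlier fits;
## resamples() requires all models to share the same resampling scheme.
resamps <- resamples(list(CART = rpartFit, RF = rfFit, GBM = gbmFit))
summary(resamps)                    ## CV Accuracy/Kappa distributions side by side
bwplot(resamps, metric = "Accuracy")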
We compute variable importance to check for further potential to improve predictive accuracy.
varImp(gbmFit)
## gbm variable importance
##
## Overall
## Sexmale 100.00000
## Age 60.93041
## Fare 39.87357
## Pclass3 38.51935
## SibSp 10.70351
## EmbarkedC 1.72800
## Pclass2 1.62879
## Parch 0.48448
## EmbarkedQ 0.05637
## EmbarkedS 0.00000
Based on these results, there is evidence that the variables Embarked and Parch can be removed.
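The refit below reuses the fitControl and gbmGrid objects defined earlier in the document. For reference, a sketch of definitions consistent with the resampling output that follows (5-fold CV repeated 5 times; shrinkage in {0.1, 0.2, 0.3}, interaction depth 1 to 5, minimum node size in {10, 20, 30}, 5 to 50 trees in steps of 5) would be:
## Sketch, reconstructed from the printed output; the original definitions
## appear earlier in the document.
fitControl <- trainControl(method = "repeatedcv", number = 5, repeats = 5)
gbmGrid <- expand.grid(interaction.depth = 1:5,
                       n.trees = seq(5, 50, by = 5),
                       shrinkage = c(0.1, 0.2, 0.3),
                       n.minobsinnode = c(10, 20, 30))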
formula <- Survived ~ Pclass + Sex + Age + SibSp + Fare
gbmFit <- train(formula, data = training_na,
                method = "gbm",
                trControl = fitControl,
                ## verbose = FALSE is passed through to gbm()
                verbose = FALSE,
                tuneGrid = gbmGrid)
gbmFit
## Stochastic Gradient Boosting
##
## 676 samples
## 8 predictor
## 2 classes: '0', '1'
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 540, 542, 541, 540, 541, 541, ...
## Resampling results across tuning parameters:
##
## shrinkage interaction.depth n.minobsinnode n.trees Accuracy
## 0.1 1 10 5 0.7650465
## 0.1 1 10 10 0.7647480
## 0.1 1 10 15 0.7653406
## 0.1 1 10 20 0.7665171
## 0.1 1 10 25 0.7676979
## 0.1 1 10 30 0.7694801
## 0.1 1 10 35 0.7733232
## 0.1 1 10 40 0.7712513
## 0.1 1 10 45 0.7783254
## 0.1 1 10 50 0.7848593
## 0.1 1 20 5 0.7679877
## 0.1 1 20 10 0.7676827
## 0.1 1 20 15 0.7667938
## 0.1 1 20 20 0.7688635
## 0.1 1 20 25 0.7706413
## 0.1 1 20 30 0.7700487
## 0.1 1 20 35 0.7721162
## 0.1 1 20 40 0.7706565
## 0.1 1 20 45 0.7727328
## 0.1 1 20 50 0.7736238
## 0.1 1 30 5 0.7653406
## 0.1 1 30 10 0.7691641
## 0.1 1 30 15 0.7653406
## 0.1 1 30 20 0.7674038
## 0.1 1 30 25 0.7679920
## 0.1 1 30 30 0.7670966
## 0.1 1 30 35 0.7656195
## 0.1 1 30 40 0.7653384
## 0.1 1 30 45 0.7665280
## 0.1 1 30 50 0.7694866
## 0.1 2 10 5 0.7798879
## 0.1 2 10 10 0.7698026
## 0.1 2 10 15 0.7739267
## 0.1 2 10 20 0.7818919
## 0.1 2 10 25 0.7904889
## 0.1 2 10 30 0.7990795
## 0.1 2 10 35 0.8005697
## 0.1 2 10 40 0.7999945
## 0.1 2 10 45 0.7985109
## 0.1 2 10 50 0.7993998
## 0.1 2 20 5 0.7825546
## 0.1 2 20 10 0.7739310
## 0.1 2 20 15 0.7686043
## 0.1 2 20 20 0.7774626
## 0.1 2 20 25 0.7848613
## 0.1 2 20 30 0.7890161
## 0.1 2 20 35 0.7940752
## 0.1 2 20 40 0.7991057
## 0.1 2 20 45 0.7970315
## 0.1 2 20 50 0.8005807
## 0.1 2 30 5 0.7825546
## 0.1 2 30 10 0.7781101
## 0.1 2 30 15 0.7721554
## 0.1 2 30 20 0.7709746
## 0.1 2 30 25 0.7742274
## 0.1 2 30 30 0.7810226
## 0.1 2 30 35 0.7804431
## 0.1 2 30 40 0.7842863
## 0.1 2 30 45 0.7807373
## 0.1 2 30 50 0.7831164
## 0.1 3 10 5 0.7804563
## 0.1 3 10 10 0.7774824
## 0.1 3 10 15 0.7901948
## 0.1 3 10 20 0.8011536
## 0.1 3 10 25 0.8035305
## 0.1 3 10 30 0.8044304
## 0.1 3 10 35 0.8029576
## 0.1 3 10 40 0.8064913
## 0.1 3 10 45 0.8064957
## 0.1 3 10 50 0.8044391
## 0.1 3 20 5 0.7825546
## 0.1 3 20 10 0.7748177
## 0.1 3 20 15 0.7756958
## 0.1 3 20 20 0.7896000
## 0.1 3 20 25 0.7919573
## 0.1 3 20 30 0.7970162
## 0.1 3 20 35 0.8005784
## 0.1 3 20 40 0.8017657
## 0.1 3 20 45 0.8011579
## 0.1 3 20 50 0.7999793
## 0.1 3 30 5 0.7825546
## 0.1 3 30 10 0.7745236
## 0.1 3 30 15 0.7751295
## 0.1 3 30 20 0.7813343
## 0.1 3 30 25 0.7798549
## 0.1 3 30 30 0.7834061
## 0.1 3 30 35 0.7837024
## 0.1 3 30 40 0.7857722
## 0.1 3 30 45 0.7860684
## 0.1 3 30 50 0.7866567
## 0.1 4 10 5 0.7822473
## 0.1 4 10 10 0.7887439
## 0.1 4 10 15 0.8032517
## 0.1 4 10 20 0.8100360
## 0.1 4 10 25 0.8106395
## 0.1 4 10 30 0.8112343
## 0.1 4 10 35 0.8085850
## 0.1 4 10 40 0.8100513
## 0.1 4 10 45 0.8142082
## 0.1 4 10 50 0.8127224
## 0.1 4 20 5 0.7813694
## 0.1 4 20 10 0.7822297
## 0.1 4 20 15 0.7842753
## 0.1 4 20 20 0.7893168
## 0.1 4 20 25 0.7916916
## 0.1 4 20 30 0.7952407
## 0.1 4 20 35 0.7990948
## 0.1 4 20 40 0.8008813
## 0.1 4 20 45 0.8029511
## 0.1 4 20 50 0.8082626
## 0.1 4 30 5 0.7825546
## 0.1 4 30 10 0.7775065
## 0.1 4 30 15 0.7754105
## 0.1 4 30 20 0.7768875
## 0.1 4 30 25 0.7777743
## 0.1 4 30 30 0.7851796
## 0.1 4 30 35 0.7866698
## 0.1 4 30 40 0.7919812
## 0.1 4 30 45 0.7940619
## 0.1 4 30 50 0.7982013
## 0.1 5 10 5 0.7845980
## 0.1 5 10 10 0.7943409
## 0.1 5 10 15 0.8029337
## 0.1 5 10 20 0.8061995
## 0.1 5 10 25 0.8064979
## 0.1 5 10 30 0.8091603
## 0.1 5 10 35 0.8100536
## 0.1 5 10 40 0.8127115
## 0.1 5 10 45 0.8162649
## 0.1 5 10 50 0.8144849
## 0.1 5 20 5 0.7822583
## 0.1 5 20 10 0.7801797
## 0.1 5 20 15 0.7887351
## 0.1 5 20 20 0.7890226
## 0.1 5 20 25 0.7967178
## 0.1 5 20 30 0.8005741
## 0.1 5 20 35 0.8070927
## 0.1 5 20 40 0.8094499
## 0.1 5 20 45 0.8044260
## 0.1 5 20 50 0.8106352
## 0.1 5 30 5 0.7825546
## 0.1 5 30 10 0.7789769
## 0.1 5 30 15 0.7789682
## 0.1 5 30 20 0.7786588
## 0.1 5 30 25 0.7830945
## 0.1 5 30 30 0.7831055
## 0.1 5 30 35 0.7860751
## 0.1 5 30 40 0.7905020
## 0.1 5 30 45 0.7931687
## 0.1 5 30 50 0.7970227
## 0.2 1 10 5 0.7653406
## 0.2 1 10 10 0.7656326
## 0.2 1 10 15 0.7670988
## 0.2 1 10 20 0.7739027
## 0.2 1 10 25 0.7804190
## 0.2 1 10 30 0.7780531
## 0.2 1 10 35 0.7863495
## 0.2 1 10 40 0.7842907
## 0.2 1 10 45 0.7878420
## 0.2 1 10 50 0.7860685
## 0.2 1 20 5 0.7644517
## 0.2 1 20 10 0.7656304
## 0.2 1 20 15 0.7709463
## 0.2 1 20 20 0.7744931
## 0.2 1 20 25 0.7792362
## 0.2 1 20 30 0.7771752
## 0.2 1 20 35 0.7831099
## 0.2 1 20 40 0.7816262
## 0.2 1 20 45 0.7825151
## 0.2 1 20 50 0.7866524
## 0.2 1 30 5 0.7673885
## 0.2 1 30 10 0.7668112
## 0.2 1 30 15 0.7671162
## 0.2 1 30 20 0.7694844
## 0.2 1 30 25 0.7715498
## 0.2 1 30 30 0.7727415
## 0.2 1 30 35 0.7762927
## 0.2 1 30 40 0.7745171
## 0.2 1 30 45 0.7745171
## 0.2 1 30 50 0.7754060
## 0.2 2 10 5 0.7727371
## 0.2 2 10 10 0.7834039
## 0.2 2 10 15 0.8008639
## 0.2 2 10 20 0.7996851
## 0.2 2 10 25 0.7996743
## 0.2 2 10 30 0.8011580
## 0.2 2 10 35 0.8023606
## 0.2 2 10 40 0.7999967
## 0.2 2 10 45 0.7996917
## 0.2 2 10 50 0.8014652
## 0.2 2 20 5 0.7739267
## 0.2 2 20 10 0.7774671
## 0.2 2 20 15 0.7851512
## 0.2 2 20 20 0.7896196
## 0.2 2 20 25 0.7973082
## 0.2 2 20 30 0.7979008
## 0.2 2 20 35 0.8011798
## 0.2 2 20 40 0.8023606
## 0.2 2 20 45 0.7997005
## 0.2 2 20 50 0.7988094
## 0.2 2 30 5 0.7724474
## 0.2 2 30 10 0.7689225
## 0.2 2 30 15 0.7724519
## 0.2 2 30 20 0.7792930
## 0.2 2 30 25 0.7846045
## 0.2 2 30 30 0.7843016
## 0.2 2 30 35 0.7834367
## 0.2 2 30 40 0.7887657
## 0.2 2 30 45 0.7943823
## 0.2 2 30 50 0.7946743
## 0.2 3 10 5 0.7854912
## 0.2 3 10 10 0.7973169
## 0.2 3 10 15 0.8020643
## 0.2 3 10 20 0.8017658
## 0.2 3 10 25 0.8053170
## 0.2 3 10 30 0.8050119
## 0.2 3 10 35 0.8061883
## 0.2 3 10 40 0.8050163
## 0.2 3 10 45 0.8088594
## 0.2 3 10 50 0.8070970
## 0.2 3 20 5 0.7783733
## 0.2 3 20 10 0.7848788
## 0.2 3 20 15 0.7893257
## 0.2 3 20 20 0.7982124
## 0.2 3 20 25 0.8002996
## 0.2 3 20 30 0.8035327
## 0.2 3 20 35 0.8094871
## 0.2 3 20 40 0.8112431
## 0.2 3 20 45 0.8109489
## 0.2 3 20 50 0.8100491
## 0.2 3 30 5 0.7792821
## 0.2 3 30 10 0.7703929
## 0.2 3 30 15 0.7863712
## 0.2 3 30 20 0.7807592
## 0.2 3 30 25 0.7907961
## 0.2 3 30 30 0.7958288
## 0.2 3 30 35 0.7928900
## 0.2 3 30 40 0.7988006
## 0.2 3 30 45 0.7958377
## 0.2 3 30 50 0.8032298
## 0.2 4 10 5 0.7887199
## 0.2 4 10 10 0.8047092
## 0.2 4 10 15 0.7990927
## 0.2 4 10 20 0.8017724
## 0.2 4 10 25 0.8097550
## 0.2 4 10 30 0.8097507
## 0.2 4 10 35 0.8130209
## 0.2 4 10 40 0.8085677
## 0.2 4 10 45 0.8094872
## 0.2 4 10 50 0.8115481
## 0.2 4 20 5 0.7789814
## 0.2 4 20 10 0.7843104
## 0.2 4 20 15 0.7946524
## 0.2 4 20 20 0.8008616
## 0.2 4 20 25 0.8064825
## 0.2 4 20 30 0.8088486
## 0.2 4 20 35 0.8112146
## 0.2 4 20 40 0.8180076
## 0.2 4 20 45 0.8162495
## 0.2 4 20 50 0.8159532
## 0.2 4 30 5 0.7766088
## 0.2 4 30 10 0.7768985
## 0.2 4 30 15 0.7872515
## 0.2 4 30 20 0.7916718
## 0.2 4 30 25 0.7984845
## 0.2 4 30 30 0.8035150
## 0.2 4 30 35 0.8064760
## 0.2 4 30 40 0.8088441
## 0.2 4 30 45 0.8053083
## 0.2 4 30 50 0.8082561
## 0.2 5 10 5 0.7893323
## 0.2 5 10 10 0.8005654
## 0.2 5 10 15 0.8088772
## 0.2 5 10 20 0.8126940
## 0.2 5 10 25 0.8127049
## 0.2 5 10 30 0.8115416
## 0.2 5 10 35 0.8044370
## 0.2 5 10 40 0.8059075
## 0.2 5 10 45 0.8038400
## 0.2 5 10 50 0.8068030
## 0.2 5 20 5 0.7807591
## 0.2 5 20 10 0.7911013
## 0.2 5 20 15 0.7952517
## 0.2 5 20 20 0.8064847
## 0.2 5 20 25 0.8103322
## 0.2 5 20 30 0.8147877
## 0.2 5 20 35 0.8165677
## 0.2 5 20 40 0.8204000
## 0.2 5 20 45 0.8144870
## 0.2 5 20 50 0.8136069
## 0.2 5 30 5 0.7798703
## 0.2 5 30 10 0.7739333
## 0.2 5 30 15 0.7795609
## 0.2 5 30 20 0.7872471
## 0.2 5 30 25 0.7943583
## 0.2 5 30 30 0.7979073
## 0.2 5 30 35 0.8020489
## 0.2 5 30 40 0.8097550
## 0.2 5 30 45 0.8067986
## 0.2 5 30 50 0.8044238
## 0.3 1 10 5 0.7629877
## 0.3 1 10 10 0.7665432
## 0.3 1 10 15 0.7759878
## 0.3 1 10 20 0.7795500
## 0.3 1 10 25 0.7807374
## 0.3 1 10 30 0.7836916
## 0.3 1 10 35 0.7860729
## 0.3 1 10 40 0.7872494
## 0.3 1 10 45 0.7869531
## 0.3 1 10 50 0.7810315
## 0.3 1 20 5 0.7709289
## 0.3 1 20 10 0.7709463
## 0.3 1 20 15 0.7751251
## 0.3 1 20 20 0.7798418
## 0.3 1 20 25 0.7783538
## 0.3 1 20 30 0.7792667
## 0.3 1 20 35 0.7842755
## 0.3 1 20 40 0.7813300
## 0.3 1 20 45 0.7786697
## 0.3 1 20 50 0.7810292
## 0.3 1 30 5 0.7694604
## 0.3 1 30 10 0.7670770
## 0.3 1 30 15 0.7727393
## 0.3 1 30 20 0.7706565
## 0.3 1 30 25 0.7712688
## 0.3 1 30 30 0.7736501
## 0.3 1 30 35 0.7754192
## 0.3 1 30 40 0.7777764
## 0.3 1 30 45 0.7807635
## 0.3 1 30 50 0.7766022
## 0.3 2 10 5 0.7688984
## 0.3 2 10 10 0.7952495
## 0.3 2 10 15 0.8044216
## 0.3 2 10 20 0.7999771
## 0.3 2 10 25 0.8002756
## 0.3 2 10 30 0.8002691
## 0.3 2 10 35 0.8038465
## 0.3 2 10 40 0.8059009
## 0.3 2 10 45 0.8047331
## 0.3 2 10 50 0.8064978
## 0.3 2 20 5 0.7706652
## 0.3 2 20 10 0.7893124
## 0.3 2 20 15 0.7934674
## 0.3 2 20 20 0.7997028
## 0.3 2 20 25 0.8020688
## 0.3 2 20 30 0.8005982
## 0.3 2 20 35 0.8056242
## 0.3 2 20 40 0.8094696
## 0.3 2 20 45 0.8094630
## 0.3 2 20 50 0.8097441
## 0.3 2 30 5 0.7668286
## 0.3 2 30 10 0.7759986
## 0.3 2 30 15 0.7845825
## 0.3 2 30 20 0.7869814
## 0.3 2 30 25 0.7854759
## 0.3 2 30 30 0.7902036
## 0.3 2 30 35 0.7887287
## 0.3 2 30 40 0.8014521
## 0.3 2 30 45 0.7976024
## 0.3 2 30 50 0.8014476
## 0.3 3 10 5 0.7872383
## 0.3 3 10 10 0.8032321
## 0.3 3 10 15 0.8020578
## 0.3 3 10 20 0.8035415
## 0.3 3 10 25 0.8044347
## 0.3 3 10 30 0.8056111
## 0.3 3 10 35 0.8118247
## 0.3 3 10 40 0.8053193
## 0.3 3 10 45 0.8082626
## 0.3 3 10 50 0.8070818
## 0.3 3 20 5 0.7789771
## 0.3 3 20 10 0.7943605
## 0.3 3 20 15 0.7993868
## 0.3 3 20 20 0.8100491
## 0.3 3 20 25 0.8085589
## 0.3 3 20 30 0.8103476
## 0.3 3 20 35 0.8130209
## 0.3 3 20 40 0.8118291
## 0.3 3 20 45 0.8100688
## 0.3 3 20 50 0.8121321
## 0.3 3 30 5 0.7760032
## 0.3 3 30 10 0.7837135
## 0.3 3 30 15 0.7881273
## 0.3 3 30 20 0.7955347
## 0.3 3 30 25 0.7973170
## 0.3 3 30 30 0.8020577
## 0.3 3 30 35 0.8065023
## 0.3 3 30 40 0.8056025
## 0.3 3 30 45 0.8124085
## 0.3 3 30 50 0.8094522
## 0.3 4 10 5 0.7914216
## 0.3 4 10 10 0.8044435
## 0.3 4 10 15 0.8029532
## 0.3 4 10 20 0.8014718
## 0.3 4 10 25 0.8091667
## 0.3 4 10 30 0.8085698
## 0.3 4 10 35 0.8064805
## 0.3 4 10 40 0.8088597
## 0.3 4 10 45 0.8032060
## 0.3 4 10 50 0.8002779
## 0.3 4 20 5 0.7766132
## 0.3 4 20 10 0.7943364
## 0.3 4 20 15 0.7999903
## 0.3 4 20 20 0.8044326
## 0.3 4 20 25 0.8088727
## 0.3 4 20 30 0.8112496
## 0.3 4 20 35 0.8100426
## 0.3 4 20 40 0.8088617
## 0.3 4 20 45 0.8097463
## 0.3 4 20 50 0.8088705
## 0.3 4 30 5 0.7733495
## 0.3 4 30 10 0.7795389
## 0.3 4 30 15 0.7904824
## 0.3 4 30 20 0.8011491
## 0.3 4 30 25 0.8020489
## 0.3 4 30 30 0.8038574
## 0.3 4 30 35 0.8097637
## 0.3 4 30 40 0.8139054
## 0.3 4 30 45 0.8112301
## 0.3 4 30 50 0.8121298
## 0.3 5 10 5 0.7896197
## 0.3 5 10 10 0.8011842
## 0.3 5 10 15 0.7982321
## 0.3 5 10 20 0.8014827
## 0.3 5 10 25 0.8008968
## 0.3 5 10 30 0.8094764
## 0.3 5 10 35 0.8038379
## 0.3 5 10 40 0.8032452
## 0.3 5 10 45 0.8035328
## 0.3 5 10 50 0.8044172
## 0.3 5 20 5 0.7828288
## 0.3 5 20 10 0.7979292
## 0.3 5 20 15 0.8070925
## 0.3 5 20 20 0.8109445
## 0.3 5 20 25 0.8141798
## 0.3 5 20 30 0.8130077
## 0.3 5 20 35 0.8159532
## 0.3 5 20 40 0.8124065
## 0.3 5 20 45 0.8115242
## 0.3 5 20 50 0.8088553
## 0.3 5 30 5 0.7733320
## 0.3 5 30 10 0.7792449
## 0.3 5 30 15 0.7922952
## 0.3 5 30 20 0.8005741
## 0.3 5 30 25 0.8064891
## 0.3 5 30 30 0.8076743
## 0.3 5 30 35 0.8088639
## 0.3 5 30 40 0.8082627
## 0.3 5 30 45 0.8115242
## 0.3 5 30 50 0.8100426
## Kappa Accuracy SD Kappa SD
## 0.5076039 0.04109361 0.08625388
## 0.5075779 0.04036660 0.08278741
## 0.5096842 0.04089764 0.08441598
## 0.5125854 0.04194913 0.08649251
## 0.5151579 0.04249631 0.08761360
## 0.5189609 0.04085342 0.08474285
## 0.5268325 0.04268683 0.08845805
## 0.5226071 0.04398196 0.09038121
## 0.5377509 0.04815179 0.09983048
## 0.5522024 0.04537137 0.09274322
## 0.5148522 0.04690653 0.09776215
## 0.5143781 0.04557419 0.09404177
## 0.5124787 0.04643057 0.09657169
## 0.5176696 0.04816493 0.10003366
## 0.5211326 0.05030914 0.10434950
## 0.5194282 0.04762548 0.09840924
## 0.5239292 0.04869914 0.10054480
## 0.5218169 0.04553362 0.09342422
## 0.5261565 0.04281365 0.08773389
## 0.5284063 0.04429297 0.09036232
## 0.5096842 0.04089764 0.08441598
## 0.5184834 0.04699744 0.09746740
## 0.5096842 0.04089764 0.08441598
## 0.5138082 0.04238335 0.08715462
## 0.5152669 0.04277144 0.08795833
## 0.5129152 0.04233791 0.08675609
## 0.5086150 0.04202676 0.08582201
## 0.5094311 0.04434393 0.09012278
## 0.5111905 0.04318451 0.08829421
## 0.5189913 0.04327424 0.08807619
## 0.5175588 0.02955306 0.06099920
## 0.5013281 0.03731496 0.07342117
## 0.5121850 0.03797362 0.07542406
## 0.5327311 0.03862864 0.07848536
## 0.5531304 0.03573868 0.07272478
## 0.5728126 0.03283221 0.06661084
## 0.5755592 0.03461442 0.07131651
## 0.5748925 0.03836310 0.08139844
## 0.5733039 0.03454678 0.07166469
## 0.5756245 0.03245140 0.06730315
## 0.5213100 0.02152455 0.05095544
## 0.5078162 0.03784780 0.07526784
## 0.5011413 0.03772270 0.07459307
## 0.5222073 0.04018724 0.07902225
## 0.5377672 0.03852563 0.07825214
## 0.5487356 0.03902357 0.07981537
## 0.5594051 0.03405851 0.07093463
## 0.5708445 0.03416711 0.06997745
## 0.5675487 0.03747646 0.07736933
## 0.5760492 0.03485519 0.07229500
## 0.5213100 0.02152455 0.05095544
## 0.5141004 0.03128100 0.06776064
## 0.5062801 0.03251339 0.06590047
## 0.5054374 0.03108089 0.06284462
## 0.5165689 0.03513114 0.07021495
## 0.5310920 0.03712679 0.07574970
## 0.5302199 0.03325284 0.06819921
## 0.5389278 0.03808922 0.07891001
## 0.5322533 0.03797813 0.07925735
## 0.5384944 0.03187188 0.06692075
## 0.5183923 0.02297541 0.05138336
## 0.5176027 0.03201758 0.06379236
## 0.5489239 0.03039948 0.06503384
## 0.5761347 0.03134652 0.06335007
## 0.5826072 0.03154359 0.06439724
## 0.5851293 0.03120455 0.06523071
## 0.5818726 0.03251205 0.06837112
## 0.5907046 0.03192052 0.06552983
## 0.5916830 0.02959812 0.06073760
## 0.5875177 0.03077353 0.06402562
## 0.5213100 0.02152455 0.05095544
## 0.5092355 0.03742428 0.07470615
## 0.5159406 0.04203777 0.08433922
## 0.5478035 0.03694859 0.07551868
## 0.5557258 0.04015328 0.08266016
## 0.5676968 0.03434081 0.07128932
## 0.5759916 0.03629467 0.07558158
## 0.5791580 0.03584918 0.07508882
## 0.5791266 0.03251351 0.06710495
## 0.5764155 0.03254344 0.06915580
## 0.5213100 0.02152455 0.05095544
## 0.5080019 0.03698989 0.07280167
## 0.5113073 0.02953740 0.06184614
## 0.5273182 0.03267575 0.06922616
## 0.5271039 0.03249820 0.06699687
## 0.5366589 0.03786335 0.07765569
## 0.5387421 0.03903135 0.08054338
## 0.5429512 0.03777564 0.07852863
## 0.5441142 0.03770652 0.07877465
## 0.5470530 0.03685731 0.07717848
## 0.5232440 0.02454842 0.05699751
## 0.5454102 0.02764014 0.05840005
## 0.5793553 0.03210739 0.07044302
## 0.5959995 0.02856420 0.06010641
## 0.5981550 0.02658368 0.05474358
## 0.6002488 0.02663878 0.05598578
## 0.5950835 0.02749477 0.05832082
## 0.5991693 0.03020445 0.06441936
## 0.6085242 0.03199389 0.06838405
## 0.6060848 0.03256386 0.06952893
## 0.5190530 0.02298929 0.05365006
## 0.5241182 0.02828445 0.06279925
## 0.5343323 0.03650077 0.07897491
## 0.5482101 0.03329880 0.07189492
## 0.5551829 0.03419294 0.07319042
## 0.5646453 0.03651268 0.07755369
## 0.5741573 0.03327655 0.07076414
## 0.5789351 0.03132005 0.06616363
## 0.5838577 0.02930055 0.06211104
## 0.5950659 0.02822253 0.05985576
## 0.5213100 0.02152455 0.05095544
## 0.5129001 0.02551997 0.05532715
## 0.5143028 0.03680123 0.07461167
## 0.5210360 0.03972924 0.08163682
## 0.5253807 0.03840247 0.07947669
## 0.5417008 0.03802720 0.07989643
## 0.5464618 0.03973444 0.08429474
## 0.5591978 0.04384399 0.09134254
## 0.5644824 0.03952669 0.08309843
## 0.5732771 0.03611418 0.07591907
## 0.5284078 0.02380003 0.05473521
## 0.5566749 0.03418736 0.07660423
## 0.5795601 0.03295329 0.07133374
## 0.5880549 0.03116554 0.06688114
## 0.5894423 0.02819350 0.06100916
## 0.5958218 0.02912897 0.06300663
## 0.5984637 0.03081703 0.06632380
## 0.6051336 0.03164944 0.06772540
## 0.6130816 0.03372100 0.07173444
## 0.6103864 0.03614719 0.07651870
## 0.5211476 0.02174908 0.05131352
## 0.5198622 0.02529036 0.05730606
## 0.5447671 0.03361916 0.07361804
## 0.5494535 0.03217431 0.06704887
## 0.5678168 0.03011052 0.06290892
## 0.5772009 0.02945917 0.06179046
## 0.5924384 0.03436586 0.07326183
## 0.5976637 0.03319264 0.06984863
## 0.5878723 0.03450372 0.07338230
## 0.6015569 0.03102212 0.06512620
## 0.5213100 0.02152455 0.05095544
## 0.5158157 0.02797872 0.05910639
## 0.5211805 0.03293776 0.06891332
## 0.5241776 0.03544275 0.07358851
## 0.5364619 0.04158817 0.08542879
## 0.5379413 0.03705418 0.07788416
## 0.5459351 0.03166404 0.06666944
## 0.5570653 0.03668386 0.07642142
## 0.5634764 0.03602403 0.07542007
## 0.5720147 0.03589099 0.07582715
## 0.5096842 0.04089764 0.08441598
## 0.5097745 0.04042629 0.08382582
## 0.5113034 0.04028399 0.08190335
## 0.5280877 0.04328993 0.08949897
## 0.5427489 0.04542295 0.09173215
## 0.5388583 0.04392948 0.08916430
## 0.5559891 0.04237092 0.08631715
## 0.5514131 0.03770301 0.07735375
## 0.5594434 0.03954732 0.08125163
## 0.5555267 0.04101671 0.08454199
## 0.5065067 0.04000939 0.08188262
## 0.5099226 0.04110228 0.08426382
## 0.5202525 0.04402044 0.08958087
## 0.5290694 0.04691860 0.09668624
## 0.5404020 0.04625627 0.09508709
## 0.5367168 0.04154235 0.08470142
## 0.5492057 0.04051175 0.08295952
## 0.5465520 0.03830180 0.07826517
## 0.5484461 0.03701835 0.07596669
## 0.5575396 0.04079483 0.08395236
## 0.5125757 0.04551904 0.09405954
## 0.5132541 0.04233581 0.08730651
## 0.5141972 0.04737213 0.09703677
## 0.5172625 0.04406786 0.09041093
## 0.5218390 0.04582741 0.09341824
## 0.5257537 0.04423183 0.09154254
## 0.5338981 0.04639196 0.09574870
## 0.5298938 0.04709359 0.09658647
## 0.5297235 0.04427554 0.09167856
## 0.5315917 0.04560200 0.09470374
## 0.5079217 0.03853810 0.07627750
## 0.5360035 0.03524754 0.06972443
## 0.5739809 0.02615674 0.05398488
## 0.5747495 0.02801653 0.05824022
## 0.5762993 0.02613900 0.05267640
## 0.5801644 0.02994956 0.06217244
## 0.5837482 0.03072015 0.06434400
## 0.5786283 0.03082365 0.06446365
## 0.5782029 0.03342865 0.06948328
## 0.5820599 0.03249122 0.06758447
## 0.5075192 0.03639621 0.07580686
## 0.5206488 0.03481448 0.07139675
## 0.5409527 0.03389840 0.06868138
## 0.5519179 0.03932030 0.08155925
## 0.5703539 0.03685034 0.07738060
## 0.5721592 0.03821345 0.07983189
## 0.5793062 0.03274957 0.06818837
## 0.5821861 0.03734070 0.07817908
## 0.5772583 0.03350335 0.06955575
## 0.5752508 0.03111882 0.06356680
## 0.5054414 0.03721578 0.07392187
## 0.5014728 0.03258299 0.06757576
## 0.5136569 0.03671668 0.07512743
## 0.5288001 0.03911664 0.08216517
## 0.5425643 0.03773109 0.07919908
## 0.5428113 0.04025326 0.08417897
## 0.5421458 0.04164559 0.08755444
## 0.5531057 0.03870218 0.08179105
## 0.5653479 0.03702377 0.07863791
## 0.5659117 0.02997941 0.06337842
## 0.5355387 0.02755000 0.05764481
## 0.5682288 0.03150435 0.06734523
## 0.5806250 0.03024239 0.06471188
## 0.5818961 0.02982371 0.06318456
## 0.5898285 0.02852691 0.05891095
## 0.5897497 0.03179742 0.06652565
## 0.5928928 0.03423097 0.07040280
## 0.5907854 0.03443952 0.07040868
## 0.5989032 0.03445367 0.07087042
## 0.5956191 0.03389762 0.06959290
## 0.5162144 0.03101454 0.06443504
## 0.5382354 0.03966719 0.08405207
## 0.5530446 0.03535511 0.07440991
## 0.5727021 0.03274281 0.06810121
## 0.5779696 0.03160342 0.06779031
## 0.5851008 0.03484363 0.07418638
## 0.5987294 0.03265857 0.06847261
## 0.6033878 0.03330392 0.06936604
## 0.6015052 0.03618210 0.07661692
## 0.6006915 0.03505363 0.07414880
## 0.5157394 0.02363133 0.05414740
## 0.5063243 0.03909304 0.08285609
## 0.5429770 0.03145474 0.06685636
## 0.5330626 0.03763184 0.08145356
## 0.5570273 0.03948657 0.08265040
## 0.5683632 0.03730431 0.07849854
## 0.5631842 0.03663926 0.07678405
## 0.5758562 0.03483930 0.07315695
## 0.5712759 0.03045508 0.06327053
## 0.5869201 0.03721804 0.07814438
## 0.5471224 0.03383751 0.07245848
## 0.5851025 0.02875253 0.06083880
## 0.5754297 0.03077736 0.06437173
## 0.5818841 0.03110770 0.06679376
## 0.6002900 0.02942571 0.06159563
## 0.6011870 0.03030955 0.06426296
## 0.6079054 0.02847538 0.06059109
## 0.5990938 0.02417482 0.05233494
## 0.6013742 0.02210729 0.04810795
## 0.6061870 0.02791413 0.05881502
## 0.5171456 0.02503762 0.05743251
## 0.5387677 0.03297126 0.06992072
## 0.5639126 0.03331173 0.07277313
## 0.5808149 0.03127880 0.06619241
## 0.5927774 0.02720043 0.05811248
## 0.5991408 0.03406167 0.07042201
## 0.6047117 0.03330791 0.06925659
## 0.6189718 0.02876857 0.05908081
## 0.6154176 0.02933371 0.06084238
## 0.6145587 0.02983024 0.06136669
## 0.5108602 0.02753795 0.05908533
## 0.5200771 0.02963807 0.06317179
## 0.5448621 0.03163938 0.06643466
## 0.5572472 0.03715770 0.07711604
## 0.5729225 0.03819616 0.07986788
## 0.5849380 0.04387361 0.09126109
## 0.5915193 0.03492118 0.07253683
## 0.5982519 0.03531545 0.07359388
## 0.5905825 0.03301882 0.06948311
## 0.5968970 0.02975709 0.06225711
## 0.5481971 0.03105093 0.06775289
## 0.5772178 0.02378279 0.04990814
## 0.5971521 0.02734852 0.05777045
## 0.6066114 0.02748552 0.05777791
## 0.6075743 0.02789555 0.05801328
## 0.6053878 0.02752251 0.05761901
## 0.5907568 0.02862461 0.06049590
## 0.5940568 0.02869773 0.05988225
## 0.5901794 0.02382209 0.04986138
## 0.5968362 0.02389333 0.04969293
## 0.5223668 0.02616293 0.05792231
## 0.5523792 0.02878512 0.06159664
## 0.5660404 0.02952033 0.06356401
## 0.5915798 0.02519588 0.05303270
## 0.6020469 0.02752672 0.05668169
## 0.6119612 0.02517679 0.05263668
## 0.6149341 0.02421316 0.05030188
## 0.6228926 0.02930726 0.06109909
## 0.6112014 0.02558445 0.05293903
## 0.6096875 0.02772749 0.05825149
## 0.5170963 0.02476453 0.05553599
## 0.5151831 0.03428780 0.07097545
## 0.5307831 0.03262224 0.06886704
## 0.5506689 0.03772972 0.07864446
## 0.5670876 0.03468113 0.07156817
## 0.5751079 0.03111586 0.06496373
## 0.5839473 0.03177174 0.06729742
## 0.6006997 0.02424229 0.05127555
## 0.5945101 0.02645968 0.05671359
## 0.5897804 0.02693891 0.05647164
## 0.5042317 0.04384368 0.09169462
## 0.5116983 0.04635709 0.09471758
## 0.5313406 0.04366831 0.09057961
## 0.5417169 0.03600518 0.07295383
## 0.5442813 0.03760072 0.07806973
## 0.5506486 0.03492069 0.07075305
## 0.5546959 0.03571396 0.07309115
## 0.5568940 0.04199370 0.08714508
## 0.5564235 0.04430977 0.09116239
## 0.5444169 0.04369730 0.09047547
## 0.5212752 0.04707608 0.09720894
## 0.5214658 0.04499715 0.09306589
## 0.5309629 0.04174981 0.08537250
## 0.5422248 0.04156958 0.08675282
## 0.5400004 0.04451907 0.09187509
## 0.5411602 0.03883559 0.08014758
## 0.5513452 0.04385616 0.09079675
## 0.5450848 0.03872794 0.08008243
## 0.5402264 0.03435820 0.07034475
## 0.5449724 0.03538585 0.07296943
## 0.5188928 0.04727114 0.09785920
## 0.5137496 0.04858935 0.10136736
## 0.5259463 0.04446813 0.09003924
## 0.5206879 0.04832771 0.09887403
## 0.5227818 0.04496342 0.09210506
## 0.5283646 0.03762565 0.07612483
## 0.5311199 0.04211432 0.08647997
## 0.5358893 0.04620546 0.09478386
## 0.5415556 0.03892112 0.08122184
## 0.5337292 0.04504885 0.09346850
## 0.5044123 0.03775535 0.07447129
## 0.5642861 0.03142521 0.06605216
## 0.5863658 0.02643911 0.05574419
## 0.5788642 0.03157367 0.06709909
## 0.5805628 0.03552186 0.07430208
## 0.5802454 0.03754532 0.07901513
## 0.5887020 0.03661169 0.07723999
## 0.5935198 0.03418836 0.07300737
## 0.5912376 0.03279460 0.06912068
## 0.5954065 0.03463777 0.07258563
## 0.5089089 0.03655702 0.07330838
## 0.5502465 0.03551789 0.07336626
## 0.5628419 0.03490337 0.07189782
## 0.5760193 0.02298250 0.04879009
## 0.5821927 0.02977339 0.06225723
## 0.5800978 0.03052812 0.06414805
## 0.5906182 0.02667596 0.05631229
## 0.5991200 0.02777143 0.05892695
## 0.5988207 0.03564606 0.07645734
## 0.6001534 0.03607723 0.07664771
## 0.4989912 0.03302064 0.06698692
## 0.5208135 0.03716078 0.07714448
## 0.5422144 0.03392747 0.06919272
## 0.5477268 0.03304372 0.07001594
## 0.5454116 0.03798427 0.08016327
## 0.5562474 0.03870330 0.08041220
## 0.5550359 0.03518586 0.07339118
## 0.5817618 0.03630595 0.07601738
## 0.5744521 0.02986221 0.06199226
## 0.5821185 0.03600664 0.07428438
## 0.5441500 0.04197669 0.09029240
## 0.5841730 0.03171718 0.06397713
## 0.5834974 0.03399715 0.07024507
## 0.5867952 0.03128235 0.06673191
## 0.5890839 0.03428360 0.07308671
## 0.5916379 0.03414146 0.07275355
## 0.6053033 0.03153638 0.06636247
## 0.5925895 0.03097384 0.06430859
## 0.5986427 0.03381693 0.07090420
## 0.5961111 0.02935593 0.06152150
## 0.5226873 0.02758189 0.06096110
## 0.5632308 0.03816074 0.08161615
## 0.5749646 0.03041004 0.06504771
## 0.5997318 0.03525065 0.07495393
## 0.5973271 0.02959861 0.06275124
## 0.6016746 0.03006988 0.06406117
## 0.6073858 0.03462254 0.07299245
## 0.6058377 0.03301105 0.06848608
## 0.6020970 0.03125907 0.06454750
## 0.6057274 0.03241306 0.06752622
## 0.5148866 0.02905466 0.06433447
## 0.5390347 0.03444367 0.07208687
## 0.5498964 0.04034992 0.08495636
## 0.5691522 0.03893100 0.08172473
## 0.5729750 0.03301208 0.07051275
## 0.5841490 0.02770665 0.05794515
## 0.5936804 0.02405845 0.05203295
## 0.5919237 0.02549919 0.05342341
## 0.6058790 0.02843001 0.05988551
## 0.5999448 0.02753489 0.05859149
## 0.5552277 0.03164579 0.06690612
## 0.5869941 0.02537365 0.05411274
## 0.5867479 0.02977445 0.06139830
## 0.5846483 0.03051675 0.06451691
## 0.6007183 0.02836043 0.05951653
## 0.5995462 0.02510463 0.05209373
## 0.5967910 0.02685240 0.05463060
## 0.6008504 0.02893028 0.06099129
## 0.5897348 0.03273025 0.06770714
## 0.5838191 0.02802303 0.05836531
## 0.5209105 0.03655691 0.07452222
## 0.5635245 0.03541104 0.07421372
## 0.5779275 0.03093168 0.06593276
## 0.5889012 0.02866678 0.06051698
## 0.5986066 0.02909130 0.06081404
## 0.6041638 0.03004180 0.06282546
## 0.6016781 0.03020032 0.06330321
## 0.5992187 0.03053641 0.06493718
## 0.6017382 0.02806929 0.05880999
## 0.6001568 0.03377323 0.07066079
## 0.5120004 0.03332007 0.07139964
## 0.5301313 0.03469333 0.07131935
## 0.5566584 0.03874601 0.08242654
## 0.5804191 0.03394473 0.07152096
## 0.5836423 0.03216495 0.06764591
## 0.5887043 0.02847678 0.06055578
## 0.6018528 0.02752930 0.05873195
## 0.6104733 0.02355763 0.04976941
## 0.6046108 0.03010337 0.06286530
## 0.6067995 0.02414104 0.05078873
## 0.5520928 0.03020684 0.06206806
## 0.5799999 0.02759902 0.05974073
## 0.5758925 0.03759393 0.08009871
## 0.5846172 0.03451766 0.07274120
## 0.5839624 0.03765868 0.07840593
## 0.6020309 0.03161781 0.06545618
## 0.5897636 0.03581917 0.07437561
## 0.5891368 0.03432899 0.07127742
## 0.5899718 0.03482340 0.07264926
## 0.5920343 0.03432148 0.07072547
## 0.5336794 0.04045648 0.08430304
## 0.5736655 0.04120289 0.08451669
## 0.5948249 0.03199455 0.06570885
## 0.6026876 0.02995352 0.06227179
## 0.6108138 0.03077921 0.06367999
## 0.6082740 0.02804759 0.05863661
## 0.6142975 0.02844371 0.06003546
## 0.6074679 0.02931430 0.06172579
## 0.6056868 0.02681293 0.05639974
## 0.6002665 0.03161779 0.06557887
## 0.5104267 0.03222468 0.06566327
## 0.5332100 0.04378971 0.09090689
## 0.5617907 0.03623420 0.07596338
## 0.5806184 0.03298525 0.06823559
## 0.5943340 0.02943948 0.06157529
## 0.5970835 0.03087593 0.06494774
## 0.5999543 0.02633078 0.05563549
## 0.5988983 0.02944703 0.06202289
## 0.6051584 0.02715112 0.05723168
## 0.6021276 0.02462742 0.05376976
##
## Accuracy was used to select the optimal model using the largest value.
## The final values used for the model were n.trees = 40, interaction.depth
## = 5, shrinkage = 0.2 and n.minobsinnode = 20.
trellis.par.set(caretTheme())
plot(gbmFit)
ggplot(gbmFit)
##Accuracy 0.8118678 (SD 0.03860621)
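Rather than reading the winning row off the printout, the selected tuning parameters and their cross-validated performance can be pulled from the fit object directly; a minimal sketch (a merge() join on the tuning columns is one of several ways to line them up):
gbmFit$bestTune                         ## n.trees, interaction.depth, shrinkage, n.minobsinnode
merge(gbmFit$bestTune, gbmFit$results)  ## CV Accuracy/Kappa and their SDs for the winner
## Hedged: predicting the validation set assumes its missing values were
## handled the same way as in training_na.
## pred <- predict(gbmFit, newdata = validation)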