1 Reading in the Data

# Packages used throughout: readxl/readr (input), caret (partition + CV folds),
# MASS (qda), e1071 (svm), ggplot2 (boxplots)
library(readxl)
library(readr)
library(caret)
library(MASS)
library(e1071)
library(ggplot2)

# Accumulators for the 500 repeated-CV error rates of each classifier
QDA.All<-NULL
SVM.L.GS.All<-NULL
SVM.L.BO.All<-NULL
SVM.P.GS.All<-NULL
SVM.P.BO.All<-NULL
SVM.R.GS.All<-NULL
SVM.R.BO.All<-NULL
ArcLakeGroupSummary <- read_excel("~/Desktop/EPSRC Project /ArcLakeGroupSummary.xlsx")
dundeedata <- read_csv("~/Desktop/EPSRC Project /dundeedata.csv.xls")

colnames(dundeedata)[1]<-"GloboLakes_ID" # rename the first column (GloboLID) to GloboLakes_ID so it matches the merge key

Data<-merge(ArcLakeGroupSummary, dundeedata, by = "GloboLakes_ID", all = TRUE )
Data<-subset(Data, !is.na(Group)) # drop rows with no Group label; the data set is back to the original 732 rows, just with extra columns of information
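# Sanity check (sketch): the comment above implies exactly 732 labelled rows remain
stopifnot(nrow(Data) == 732)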

Data$Group<-as.factor(Data$Group)

Data1<-data.frame(Data[,c("Group","PC1","PC2")]) # keep only the response and the two principal component scores

# Stratified split of the full data set into training (80%) and test (20%) sets

set.seed(1)

train.index<-createDataPartition(Data1$Group, p=0.8, list = FALSE)
train.set<-Data1[train.index, ]
test.set<-Data1[-train.index, ]
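createDataPartition() samples within each level of Group, so the split should preserve the class proportions of the full data set; a quick check (a sketch using base R only):

# Class proportions in the full data, training set and test set should agree closely
round(rbind(
  full  = prop.table(table(Data1$Group)),
  train = prop.table(table(train.set$Group)),
  test  = prop.table(table(test.set$Group))
), 3)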

2 QDA

for(j in 1:500){
  
  set.seed(j)
  
  folds <- createFolds(y=factor(train.set$Group), k = 5, list = FALSE)
  train.set$fold <- folds
  
  
  # Using QDA to produce the CV error rate
  
  CV.error<-NULL 
  
  for (i in 1:5) { 
    valid.data <- subset(train.set, fold == i)
    train.data <- subset(train.set, fold != i) 
    
    qda.fit<-qda(formula = Group~PC1+PC2, data=train.data)
    qda.y <- valid.data$Group
    qda.predy<-predict(qda.fit, valid.data)$class
    
    ith.test.error<- mean(qda.y!=qda.predy) 
    CV.error<-c(CV.error,(nrow(valid.data)/nrow(train.set))*ith.test.error) 
  }
  
  QDA.All<-c(QDA.All, sum(CV.error))
  
}
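Each fold contributes (nrow(valid.data)/nrow(train.set)) * error to CV.error, so, since the five folds partition the training set, sum(CV.error) is the size-weighted 5-fold CV error rate for repetition j: the overall fraction of training points misclassified when each point is predicted by the model fitted without its own fold.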

QDA.All
##   [1] 0.05593220 0.05593220 0.05254237 0.05423729 0.05084746 0.05593220
##   [7] 0.05593220 0.05254237 0.05084746 0.05593220 0.05423729 0.05254237
##  [13] 0.05423729 0.05084746 0.05762712 0.05254237 0.04576271 0.05593220
##  [19] 0.04576271 0.05593220 0.04915254 0.05762712 0.05932203 0.05593220
##  [25] 0.05423729 0.04576271 0.05254237 0.05254237 0.05762712 0.05254237
##  [31] 0.04745763 0.05593220 0.04576271 0.05084746 0.05762712 0.05084746
##  [37] 0.05254237 0.05254237 0.04915254 0.05423729 0.05084746 0.05084746
##  [43] 0.06271186 0.05762712 0.05254237 0.05084746 0.05423729 0.05254237
##  [49] 0.05593220 0.05084746 0.05593220 0.05593220 0.04915254 0.05084746
##  [55] 0.04915254 0.05254237 0.05084746 0.05084746 0.05254237 0.05254237
##  [61] 0.05423729 0.05084746 0.05254237 0.05254237 0.05084746 0.06440678
##  [67] 0.05423729 0.05423729 0.04915254 0.05254237 0.05254237 0.05423729
##  [73] 0.05084746 0.05423729 0.05423729 0.05423729 0.05254237 0.05423729
##  [79] 0.04915254 0.05423729 0.05254237 0.05762712 0.05084746 0.05254237
##  [85] 0.06440678 0.06101695 0.04915254 0.05423729 0.06101695 0.05423729
##  [91] 0.05254237 0.05084746 0.04406780 0.05084746 0.05762712 0.05084746
##  [97] 0.05084746 0.04915254 0.05932203 0.05593220 0.05254237 0.05084746
## [103] 0.05084746 0.04745763 0.06271186 0.05084746 0.05423729 0.04745763
## [109] 0.05762712 0.04745763 0.05254237 0.05084746 0.05932203 0.05254237
## [115] 0.05084746 0.05423729 0.04915254 0.04915254 0.05084746 0.05423729
## [121] 0.05932203 0.05932203 0.05254237 0.05254237 0.04915254 0.05084746
## [127] 0.05254237 0.05423729 0.05254237 0.04915254 0.05254237 0.05423729
## [133] 0.05254237 0.05254237 0.05593220 0.05254237 0.05423729 0.05254237
## [139] 0.05593220 0.04915254 0.05254237 0.05762712 0.05762712 0.05423729
## [145] 0.05932203 0.05254237 0.06610169 0.05423729 0.05593220 0.05084746
## [151] 0.05254237 0.05084746 0.04915254 0.04915254 0.05423729 0.05593220
## [157] 0.04915254 0.05254237 0.05254237 0.05593220 0.05254237 0.04915254
## [163] 0.05084746 0.05254237 0.05254237 0.05084746 0.05423729 0.05084746
## [169] 0.04915254 0.05254237 0.04915254 0.05084746 0.05254237 0.04576271
## [175] 0.05762712 0.04915254 0.05423729 0.05932203 0.05762712 0.05254237
## [181] 0.05084746 0.04745763 0.05423729 0.05084746 0.05254237 0.05084746
## [187] 0.04576271 0.04745763 0.05593220 0.05254237 0.04915254 0.05932203
## [193] 0.04915254 0.05423729 0.05084746 0.05762712 0.05254237 0.05254237
## [199] 0.05593220 0.04745763 0.05593220 0.04745763 0.05423729 0.04915254
## [205] 0.04745763 0.05593220 0.05254237 0.05423729 0.05423729 0.04915254
## [211] 0.05254237 0.05762712 0.05593220 0.04745763 0.05084746 0.05254237
## [217] 0.05423729 0.05423729 0.05254237 0.04745763 0.05593220 0.05254237
## [223] 0.05423729 0.05593220 0.05762712 0.05593220 0.05423729 0.05254237
## [229] 0.05423729 0.05084746 0.04915254 0.04576271 0.05254237 0.05423729
## [235] 0.05593220 0.04576271 0.04745763 0.05762712 0.05593220 0.05932203
## [241] 0.04576271 0.05084746 0.05593220 0.05084746 0.05254237 0.05423729
## [247] 0.05762712 0.05423729 0.06101695 0.05084746 0.05084746 0.05593220
## [253] 0.05084746 0.05084746 0.05762712 0.05084746 0.05254237 0.04915254
## [259] 0.06101695 0.05593220 0.04745763 0.05423729 0.05593220 0.05084746
## [265] 0.06101695 0.04915254 0.05254237 0.05084746 0.05084746 0.05084746
## [271] 0.05254237 0.05084746 0.05423729 0.05254237 0.05593220 0.04915254
## [277] 0.04576271 0.05593220 0.05423729 0.05593220 0.05423729 0.05423729
## [283] 0.04915254 0.05084746 0.05084746 0.05084746 0.05084746 0.05593220
## [289] 0.05593220 0.05254237 0.05084746 0.05423729 0.05254237 0.05593220
## [295] 0.05423729 0.05254237 0.06271186 0.05423729 0.05254237 0.05423729
## [301] 0.05762712 0.05084746 0.04745763 0.05593220 0.04745763 0.05254237
## [307] 0.05423729 0.05423729 0.04745763 0.05593220 0.04745763 0.05762712
## [313] 0.05932203 0.04406780 0.05593220 0.05084746 0.04915254 0.06101695
## [319] 0.05254237 0.05254237 0.05084746 0.06101695 0.05932203 0.05762712
## [325] 0.04915254 0.05423729 0.05084746 0.04745763 0.05423729 0.06101695
## [331] 0.05254237 0.05254237 0.05084746 0.05593220 0.05423729 0.05254237
## [337] 0.05423729 0.05593220 0.04915254 0.05423729 0.05084746 0.04576271
## [343] 0.05084746 0.04915254 0.05593220 0.05593220 0.05423729 0.04915254
## [349] 0.05423729 0.04915254 0.05254237 0.05084746 0.05254237 0.05423729
## [355] 0.05084746 0.05254237 0.05254237 0.05254237 0.05084746 0.05254237
## [361] 0.06101695 0.05593220 0.05593220 0.05762712 0.05084746 0.05423729
## [367] 0.05762712 0.05762712 0.05423729 0.04576271 0.05762712 0.04576271
## [373] 0.05423729 0.05593220 0.05254237 0.04576271 0.04745763 0.05593220
## [379] 0.05423729 0.04576271 0.05593220 0.04745763 0.05084746 0.05084746
## [385] 0.05423729 0.05254237 0.05593220 0.05084746 0.05423729 0.05084746
## [391] 0.04576271 0.05254237 0.05593220 0.06101695 0.04915254 0.04915254
## [397] 0.05254237 0.05593220 0.05593220 0.05084746 0.05932203 0.05593220
## [403] 0.05084746 0.04067797 0.05593220 0.04745763 0.04915254 0.05084746
## [409] 0.04576271 0.05254237 0.05593220 0.04745763 0.05762712 0.05423729
## [415] 0.05084746 0.05254237 0.04576271 0.05254237 0.05593220 0.04745763
## [421] 0.05932203 0.05254237 0.04915254 0.05254237 0.04915254 0.05593220
## [427] 0.04745763 0.05254237 0.05254237 0.04915254 0.05423729 0.05084746
## [433] 0.04576271 0.04915254 0.04745763 0.05254237 0.06101695 0.05423729
## [439] 0.05254237 0.05254237 0.04745763 0.05593220 0.05084746 0.05762712
## [445] 0.04915254 0.05593220 0.04915254 0.05254237 0.06271186 0.05423729
## [451] 0.05254237 0.05593220 0.05762712 0.04745763 0.05423729 0.05593220
## [457] 0.05254237 0.05254237 0.05254237 0.05423729 0.04406780 0.05932203
## [463] 0.04915254 0.04745763 0.05932203 0.05423729 0.05423729 0.05762712
## [469] 0.05593220 0.04915254 0.04745763 0.06440678 0.04406780 0.05084746
## [475] 0.04745763 0.05084746 0.05932203 0.05593220 0.05423729 0.05254237
## [481] 0.04915254 0.05593220 0.05593220 0.05423729 0.04745763 0.04745763
## [487] 0.05762712 0.05084746 0.05423729 0.05084746 0.04745763 0.05423729
## [493] 0.05254237 0.05423729 0.05254237 0.05254237 0.05423729 0.05593220
## [499] 0.05084746 0.06101695
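Sections 3 to 8 below repeat this same repeated-CV loop with only the model call changing. A minimal refactoring sketch (the helper name fit_fun and repeat.cv are hypothetical, not part of the original analysis, but the logic mirrors the loops in this report):

# Hypothetical helper: repeat k-fold CV `reps` times for any fitting function,
# returning one weighted CV error rate per repetition
repeat.cv <- function(fit_fun, data, reps = 500, k = 5) {
  sapply(1:reps, function(j) {
    set.seed(j)
    data$fold <- createFolds(y = factor(data$Group), k = k, list = FALSE)
    fold.errors <- sapply(1:k, function(i) {
      valid.data <- subset(data, fold == i)
      train.data <- subset(data, fold != i)
      pred <- predict(fit_fun(train.data), valid.data)
      if (is.list(pred)) pred <- pred$class   # predict() on a qda fit returns a list
      (nrow(valid.data) / nrow(data)) * mean(valid.data$Group != pred)
    })
    sum(fold.errors)
  })
}

# e.g. the QDA loop above becomes:
# QDA.All <- repeat.cv(function(d) qda(Group~PC1+PC2, data = d), train.set)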

3 SVM Linear GS

for(j in 1:500){
  
  set.seed(j)
  
  folds <- createFolds(y=factor(train.set$Group), k = 5, list = FALSE)
  train.set$fold <- folds
  
  
  # SVM with linear kernel, cost = 0.006737947 - the best linear kernel SVM selected by grid search (GS)
  
  CV.error<-NULL 
  
  for (i in 1:5) { 
    valid.data <- subset(train.set, fold == i)
    train.data <- subset(train.set, fold != i) 
    
    svmfit<-svm(Group~PC1+PC2,data = train.data, kernel="linear", cost = 0.006737947 ,scale=FALSE)
    svm.y<-valid.data$Group
    svm.predy<-predict(svmfit, valid.data)
    
    ith.test.error<- mean(svm.y!=svm.predy) 
    CV.error<-c(CV.error,(nrow(valid.data)/nrow(train.set))*ith.test.error)  
  }
  
  SVM.L.GS.All<-c(SVM.L.GS.All, sum(CV.error))
  
}

SVM.L.GS.All 
##   [1] 0.03728814 0.03389831 0.03559322 0.04406780 0.03559322 0.03220339
##   [7] 0.03220339 0.03389831 0.03898305 0.03898305 0.03389831 0.03389831
##  [13] 0.03050847 0.03220339 0.03559322 0.03389831 0.03389831 0.03728814
##  [19] 0.03559322 0.04237288 0.03728814 0.03050847 0.03898305 0.03220339
##  [25] 0.03389831 0.03389831 0.03559322 0.03898305 0.03559322 0.03220339
##  [31] 0.03559322 0.03559322 0.03220339 0.04067797 0.03050847 0.03559322
##  [37] 0.03220339 0.04067797 0.03898305 0.03898305 0.03220339 0.03220339
##  [43] 0.03220339 0.03728814 0.03898305 0.03559322 0.03220339 0.03728814
##  [49] 0.04237288 0.04237288 0.03898305 0.03559322 0.03220339 0.04067797
##  [55] 0.03559322 0.03050847 0.03220339 0.03389831 0.03559322 0.04067797
##  [61] 0.03220339 0.03728814 0.03050847 0.03898305 0.03389831 0.03389831
##  [67] 0.03898305 0.03559322 0.04576271 0.03220339 0.02881356 0.03220339
##  [73] 0.03559322 0.03728814 0.03559322 0.03220339 0.03389831 0.03220339
##  [79] 0.03050847 0.04406780 0.03220339 0.03559322 0.03389831 0.02881356
##  [85] 0.04237288 0.03220339 0.03050847 0.03559322 0.03898305 0.03220339
##  [91] 0.03559322 0.03559322 0.03559322 0.03898305 0.03559322 0.02881356
##  [97] 0.03220339 0.04237288 0.03389831 0.03898305 0.04745763 0.03898305
## [103] 0.03389831 0.02881356 0.03220339 0.03220339 0.03389831 0.03389831
## [109] 0.03389831 0.03389831 0.03559322 0.03559322 0.03220339 0.03220339
## [115] 0.02881356 0.03898305 0.03728814 0.04576271 0.03728814 0.03898305
## [121] 0.04067797 0.03728814 0.03389831 0.03389831 0.04237288 0.03559322
## [127] 0.03389831 0.03559322 0.03220339 0.03559322 0.03050847 0.03389831
## [133] 0.03220339 0.04067797 0.03389831 0.04576271 0.03389831 0.03898305
## [139] 0.03389831 0.03728814 0.03220339 0.03220339 0.03050847 0.03559322
## [145] 0.04067797 0.03220339 0.03559322 0.03389831 0.04067797 0.03898305
## [151] 0.03050847 0.03220339 0.03220339 0.03728814 0.03728814 0.03898305
## [157] 0.03389831 0.03559322 0.03389831 0.03389831 0.04237288 0.03389831
## [163] 0.03559322 0.03559322 0.03389831 0.03220339 0.03559322 0.03050847
## [169] 0.03898305 0.03898305 0.03220339 0.03559322 0.03559322 0.03220339
## [175] 0.04237288 0.04237288 0.03728814 0.03559322 0.03559322 0.03389831
## [181] 0.03220339 0.02711864 0.03220339 0.03220339 0.04237288 0.03728814
## [187] 0.04576271 0.03389831 0.03389831 0.03220339 0.03559322 0.04237288
## [193] 0.03050847 0.04067797 0.03898305 0.03220339 0.03898305 0.03220339
## [199] 0.03559322 0.03220339 0.03050847 0.03559322 0.03389831 0.03728814
## [205] 0.03389831 0.03559322 0.03220339 0.03389831 0.03728814 0.03389831
## [211] 0.04406780 0.03050847 0.03728814 0.03559322 0.03050847 0.04067797
## [217] 0.03559322 0.03050847 0.04067797 0.03050847 0.03559322 0.03050847
## [223] 0.04067797 0.03559322 0.03220339 0.03898305 0.03220339 0.03220339
## [229] 0.03559322 0.03389831 0.03389831 0.03050847 0.03728814 0.03559322
## [235] 0.03389831 0.03389831 0.03220339 0.03898305 0.03389831 0.03559322
## [241] 0.03389831 0.03559322 0.03389831 0.03220339 0.02711864 0.03559322
## [247] 0.03559322 0.03728814 0.03898305 0.03559322 0.03728814 0.03220339
## [253] 0.03389831 0.03050847 0.04067797 0.03728814 0.03389831 0.03389831
## [259] 0.03220339 0.03389831 0.03220339 0.03389831 0.03050847 0.03050847
## [265] 0.03559322 0.03389831 0.03898305 0.03559322 0.03389831 0.03559322
## [271] 0.03389831 0.03898305 0.03728814 0.03559322 0.03898305 0.03559322
## [277] 0.02881356 0.03898305 0.03559322 0.03898305 0.03389831 0.03559322
## [283] 0.03559322 0.03559322 0.03728814 0.03220339 0.03389831 0.03559322
## [289] 0.03559322 0.03728814 0.04576271 0.03559322 0.03728814 0.03898305
## [295] 0.04237288 0.04237288 0.03559322 0.03559322 0.03728814 0.03559322
## [301] 0.03559322 0.03559322 0.03728814 0.03559322 0.03559322 0.03559322
## [307] 0.03389831 0.03559322 0.02881356 0.03728814 0.03389831 0.03728814
## [313] 0.03728814 0.03559322 0.03728814 0.04067797 0.03389831 0.03898305
## [319] 0.03389831 0.03389831 0.03728814 0.03389831 0.03389831 0.03728814
## [325] 0.02711864 0.03559322 0.03898305 0.03389831 0.03220339 0.03389831
## [331] 0.03220339 0.03559322 0.02881356 0.04406780 0.03389831 0.03559322
## [337] 0.03898305 0.03220339 0.03389831 0.03559322 0.04067797 0.02881356
## [343] 0.03559322 0.03389831 0.03728814 0.03559322 0.03559322 0.03898305
## [349] 0.03559322 0.03728814 0.02881356 0.03389831 0.03050847 0.03728814
## [355] 0.03559322 0.03389831 0.03220339 0.03559322 0.03898305 0.03728814
## [361] 0.04067797 0.03389831 0.03389831 0.03389831 0.03050847 0.03389831
## [367] 0.03559322 0.03898305 0.03728814 0.03050847 0.03389831 0.03389831
## [373] 0.03220339 0.04067797 0.03728814 0.03389831 0.03050847 0.03559322
## [379] 0.03220339 0.03050847 0.03389831 0.03050847 0.03389831 0.03389831
## [385] 0.03050847 0.04406780 0.03728814 0.04237288 0.03389831 0.03728814
## [391] 0.03389831 0.03220339 0.03898305 0.04237288 0.03898305 0.02881356
## [397] 0.03728814 0.03559322 0.03220339 0.03728814 0.03220339 0.03898305
## [403] 0.03389831 0.03220339 0.04067797 0.03728814 0.03728814 0.03389831
## [409] 0.03389831 0.03389831 0.04067797 0.03220339 0.03220339 0.03728814
## [415] 0.03220339 0.03220339 0.03389831 0.03220339 0.03728814 0.03898305
## [421] 0.03898305 0.03898305 0.02881356 0.03220339 0.03898305 0.03559322
## [427] 0.03220339 0.03898305 0.03559322 0.03220339 0.03050847 0.03050847
## [433] 0.03898305 0.03728814 0.03559322 0.03050847 0.03898305 0.03898305
## [439] 0.03559322 0.03220339 0.03559322 0.03898305 0.03559322 0.03559322
## [445] 0.03559322 0.03389831 0.03389831 0.03728814 0.04915254 0.03050847
## [451] 0.03050847 0.03559322 0.03559322 0.03050847 0.03220339 0.03220339
## [457] 0.03220339 0.03559322 0.03559322 0.03898305 0.03389831 0.04067797
## [463] 0.03220339 0.04067797 0.04406780 0.03898305 0.03389831 0.03898305
## [469] 0.04067797 0.02711864 0.03050847 0.03728814 0.03389831 0.03728814
## [475] 0.03559322 0.03559322 0.03728814 0.04237288 0.03389831 0.03728814
## [481] 0.02881356 0.03898305 0.03220339 0.03728814 0.03389831 0.02881356
## [487] 0.03220339 0.03220339 0.03559322 0.03220339 0.02881356 0.03559322
## [493] 0.03728814 0.03050847 0.03898305 0.03220339 0.03220339 0.03050847
## [499] 0.03728814 0.04745763

4 SVM Linear BO

for(j in 1:500){
  
  set.seed(j)
  
  folds <- createFolds(y=factor(train.set$Group), k = 5, list = FALSE)
  train.set$fold <- folds
  
  
  # SVM with linear kernel, cost = 11173.88 - the best linear kernel SVM selected by Bayesian optimisation (BO)
  
  CV.error<-NULL 
  
  for (i in 1:5) { 
    valid.data <- subset(train.set, fold == i)
    train.data <- subset(train.set, fold != i) 
    
    svmfit<-svm(Group~PC1+PC2,data = train.data, kernel="linear", cost = 11173.88 ,scale=FALSE)
    svm.y<-valid.data$Group
    svm.predy<-predict(svmfit, valid.data)
    
    ith.test.error<- mean(svm.y!=svm.predy) 
    CV.error<-c(CV.error,(nrow(valid.data)/nrow(train.set))*ith.test.error)  
  }
  
  SVM.L.BO.All<-c(SVM.L.BO.All, sum(CV.error))
  
}

SVM.L.BO.All 
##   [1] 0.02203390 0.03220339 0.03728814 0.03728814 0.03898305 0.03559322
##   [7] 0.02711864 0.03050847 0.03389831 0.03050847 0.02881356 0.03050847
##  [13] 0.02711864 0.04067797 0.03220339 0.03220339 0.02881356 0.03050847
##  [19] 0.02542373 0.03389831 0.02881356 0.02881356 0.03220339 0.03220339
##  [25] 0.03050847 0.03050847 0.03220339 0.02711864 0.03728814 0.02711864
##  [31] 0.02711864 0.03050847 0.02542373 0.02711864 0.02372881 0.03898305
##  [37] 0.03050847 0.02881356 0.03220339 0.02881356 0.03220339 0.02542373
##  [43] 0.03728814 0.02372881 0.02711864 0.02542373 0.03389831 0.03220339
##  [49] 0.03050847 0.03050847 0.03050847 0.03559322 0.02711864 0.04067797
##  [55] 0.03220339 0.03389831 0.03220339 0.04067797 0.02881356 0.03220339
##  [61] 0.03559322 0.03050847 0.03389831 0.04915254 0.02372881 0.03050847
##  [67] 0.03728814 0.04067797 0.03389831 0.03728814 0.03220339 0.02881356
##  [73] 0.02372881 0.03220339 0.03898305 0.03050847 0.03559322 0.02881356
##  [79] 0.03050847 0.03389831 0.03389831 0.03559322 0.03898305 0.03389831
##  [85] 0.03389831 0.03050847 0.02542373 0.02881356 0.03728814 0.02711864
##  [91] 0.03389831 0.02542373 0.02881356 0.03050847 0.03389831 0.03220339
##  [97] 0.02711864 0.03220339 0.03220339 0.02881356 0.02881356 0.03559322
## [103] 0.02711864 0.03559322 0.02881356 0.02372881 0.03389831 0.03050847
## [109] 0.03220339 0.03389831 0.03050847 0.03559322 0.02881356 0.03389831
## [115] 0.02711864 0.03389831 0.03389831 0.02711864 0.03050847 0.02542373
## [121] 0.03050847 0.04237288 0.03389831 0.02711864 0.02711864 0.03050847
## [127] 0.03220339 0.04237288 0.03728814 0.03050847 0.03898305 0.02711864
## [133] 0.02372881 0.02711864 0.02542373 0.03898305 0.03220339 0.02881356
## [139] 0.03050847 0.03220339 0.02372881 0.03389831 0.03220339 0.03220339
## [145] 0.02372881 0.02881356 0.02542373 0.02881356 0.03389831 0.03220339
## [151] 0.03728814 0.02542373 0.03220339 0.03050847 0.02711864 0.03389831
## [157] 0.02881356 0.04067797 0.03728814 0.03050847 0.02881356 0.03050847
## [163] 0.03728814 0.03559322 0.03728814 0.03389831 0.04237288 0.03389831
## [169] 0.02711864 0.03898305 0.03728814 0.03389831 0.03050847 0.03389831
## [175] 0.03728814 0.02881356 0.03389831 0.02711864 0.03050847 0.03389831
## [181] 0.02372881 0.03050847 0.03728814 0.02711864 0.03050847 0.03050847
## [187] 0.03050847 0.03559322 0.03220339 0.03220339 0.03898305 0.03728814
## [193] 0.02542373 0.03389831 0.02711864 0.02542373 0.03220339 0.03050847
## [199] 0.02711864 0.03728814 0.03220339 0.03389831 0.03050847 0.02881356
## [205] 0.02711864 0.02542373 0.02881356 0.03389831 0.03559322 0.03220339
## [211] 0.02372881 0.02711864 0.03220339 0.02372881 0.03898305 0.03050847
## [217] 0.03559322 0.04067797 0.03898305 0.03220339 0.02881356 0.02542373
## [223] 0.03220339 0.02881356 0.02881356 0.02881356 0.03389831 0.03050847
## [229] 0.02542373 0.03220339 0.03050847 0.03220339 0.03559322 0.03559322
## [235] 0.02881356 0.03389831 0.02881356 0.03559322 0.03220339 0.03050847
## [241] 0.03220339 0.03050847 0.03389831 0.02881356 0.03050847 0.03728814
## [247] 0.03728814 0.02711864 0.03220339 0.03728814 0.02881356 0.02711864
## [253] 0.03559322 0.02203390 0.03050847 0.03050847 0.02881356 0.04237288
## [259] 0.03050847 0.03050847 0.03389831 0.03050847 0.02203390 0.02881356
## [265] 0.02881356 0.03050847 0.02881356 0.03389831 0.03559322 0.03728814
## [271] 0.03559322 0.02881356 0.03389831 0.03220339 0.03389831 0.03050847
## [277] 0.03389831 0.02881356 0.03050847 0.03050847 0.02542373 0.03220339
## [283] 0.02711864 0.02881356 0.03559322 0.02542373 0.03220339 0.03728814
## [289] 0.03220339 0.02542373 0.03389831 0.03559322 0.02881356 0.02881356
## [295] 0.03220339 0.03389831 0.03220339 0.03050847 0.03050847 0.02881356
## [301] 0.03050847 0.03389831 0.03050847 0.03389831 0.03389831 0.03389831
## [307] 0.03050847 0.03728814 0.02542373 0.03389831 0.02881356 0.03050847
## [313] 0.03220339 0.02542373 0.03050847 0.03898305 0.03050847 0.02542373
## [319] 0.02372881 0.03050847 0.02711864 0.02881356 0.03898305 0.03389831
## [325] 0.02881356 0.03728814 0.03559322 0.03389831 0.02542373 0.03220339
## [331] 0.02711864 0.02881356 0.04237288 0.03898305 0.03220339 0.02711864
## [337] 0.02542373 0.02711864 0.03050847 0.03389831 0.02372881 0.03050847
## [343] 0.02542373 0.03050847 0.02881356 0.03220339 0.02711864 0.02881356
## [349] 0.02881356 0.03728814 0.03559322 0.02542373 0.02881356 0.03728814
## [355] 0.03389831 0.03728814 0.02372881 0.03050847 0.03220339 0.03050847
## [361] 0.03389831 0.03050847 0.03050847 0.03559322 0.02881356 0.02711864
## [367] 0.03220339 0.03898305 0.03050847 0.03389831 0.03050847 0.03220339
## [373] 0.02203390 0.04067797 0.03220339 0.03389831 0.02711864 0.02711864
## [379] 0.03389831 0.02711864 0.03728814 0.02711864 0.03220339 0.03559322
## [385] 0.02033898 0.03898305 0.03559322 0.03220339 0.03220339 0.03050847
## [391] 0.02372881 0.02711864 0.03220339 0.03220339 0.03220339 0.02542373
## [397] 0.03898305 0.04067797 0.03389831 0.03389831 0.03220339 0.03898305
## [403] 0.03559322 0.02711864 0.03559322 0.02881356 0.04406780 0.03050847
## [409] 0.03220339 0.03389831 0.03050847 0.04067797 0.02881356 0.02881356
## [415] 0.02711864 0.03050847 0.03389831 0.02881356 0.03050847 0.03220339
## [421] 0.03050847 0.03050847 0.03220339 0.03389831 0.02711864 0.03559322
## [427] 0.02711864 0.03898305 0.02542373 0.02881356 0.03220339 0.03050847
## [433] 0.03220339 0.03389831 0.02881356 0.03220339 0.02711864 0.03050847
## [439] 0.02881356 0.03220339 0.03220339 0.03728814 0.02711864 0.03050847
## [445] 0.03389831 0.02542373 0.03220339 0.02881356 0.03559322 0.03389831
## [451] 0.02542373 0.02881356 0.03050847 0.03050847 0.03728814 0.02542373
## [457] 0.04067797 0.02881356 0.03220339 0.03220339 0.02711864 0.03050847
## [463] 0.03389831 0.02711864 0.03559322 0.02711864 0.03050847 0.02881356
## [469] 0.03898305 0.02542373 0.03389831 0.03728814 0.03389831 0.03220339
## [475] 0.03389831 0.03050847 0.03050847 0.03559322 0.03050847 0.03389831
## [481] 0.03220339 0.03220339 0.03220339 0.03050847 0.02881356 0.02881356
## [487] 0.02711864 0.03050847 0.03220339 0.03050847 0.02881356 0.03389831
## [493] 0.03389831 0.03389831 0.03389831 0.02881356 0.03220339 0.03050847
## [499] 0.03220339 0.03728814

5 SVM Polynomial GS

for(j in 1:500){
  
  set.seed(j)
  
  folds <- createFolds(y=factor(train.set$Group), k = 5, list = FALSE)
  train.set$fold <- folds
  
  
  # SVM with polynomial kernel, cost = 25535010, degree = 3, gamma = 1 - the best polynomial kernel SVM selected by grid search (GS)
  
  CV.error<-NULL 
  
  for (i in 1:5) { 
    valid.data <- subset(train.set, fold == i)
    train.data <- subset(train.set, fold != i) 
    
    svmfit<-svm(Group~PC1+PC2,data = train.data, kernel="polynomial", cost = 25535010, degree = 3, gamma = 1 )
    svm.y<-valid.data$Group
    svm.predy<-predict(svmfit, valid.data)
    
    ith.test.error<- mean(svm.y!=svm.predy) 
    CV.error<-c(CV.error,(nrow(valid.data)/nrow(train.set))*ith.test.error)  
  }
  
  SVM.P.GS.All<-c(SVM.P.GS.All, sum(CV.error))
  
}

SVM.P.GS.All
##   [1] 0.03050847 0.04067797 0.04237288 0.03728814 0.04237288 0.04576271
##   [7] 0.03728814 0.03898305 0.03559322 0.03728814 0.03220339 0.04067797
##  [13] 0.03728814 0.08813559 0.04915254 0.03728814 0.03728814 0.03728814
##  [19] 0.03898305 0.03898305 0.03898305 0.04067797 0.04237288 0.04406780
##  [25] 0.03898305 0.03898305 0.03728814 0.04067797 0.04067797 0.04237288
##  [31] 0.04067797 0.03728814 0.03220339 0.03898305 0.03728814 0.04067797
##  [37] 0.03728814 0.04067797 0.03728814 0.04745763 0.04576271 0.03559322
##  [43] 0.04576271 0.03389831 0.04067797 0.03728814 0.04237288 0.04237288
##  [49] 0.04915254 0.03898305 0.03728814 0.03220339 0.03728814 0.03389831
##  [55] 0.03728814 0.04067797 0.04237288 0.04745763 0.06610169 0.03898305
##  [61] 0.04915254 0.04406780 0.03559322 0.03898305 0.03898305 0.03728814
##  [67] 0.04237288 0.03559322 0.04406780 0.04237288 0.04067797 0.08305085
##  [73] 0.04406780 0.04576271 0.03898305 0.04406780 0.04576271 0.04237288
##  [79] 0.07627119 0.08983051 0.03728814 0.03559322 0.03389831 0.04406780
##  [85] 0.04067797 0.04406780 0.03559322 0.07288136 0.04067797 0.03898305
##  [91] 0.04576271 0.03898305 0.04406780 0.05762712 0.08813559 0.03728814
##  [97] 0.03559322 0.03559322 0.03559322 0.04237288 0.04067797 0.04406780
## [103] 0.10677966 0.03898305 0.04067797 0.03898305 0.04237288 0.04915254
## [109] 0.03728814 0.03559322 0.03728814 0.03728814 0.03728814 0.04406780
## [115] 0.04237288 0.09661017 0.04067797 0.03220339 0.03898305 0.04237288
## [121] 0.04067797 0.03728814 0.04406780 0.04237288 0.04237288 0.03559322
## [127] 0.02881356 0.03898305 0.04067797 0.03728814 0.04745763 0.03220339
## [133] 0.08135593 0.03559322 0.04067797 0.04745763 0.03389831 0.03728814
## [139] 0.03728814 0.04406780 0.04915254 0.09322034 0.09661017 0.03389831
## [145] 0.03559322 0.09152542 0.04237288 0.04237288 0.04915254 0.03898305
## [151] 0.04406780 0.03389831 0.03898305 0.03898305 0.05932203 0.04067797
## [157] 0.03728814 0.03559322 0.03728814 0.03898305 0.03220339 0.03728814
## [163] 0.04406780 0.04237288 0.04406780 0.03898305 0.03559322 0.03898305
## [169] 0.03728814 0.03220339 0.03559322 0.03559322 0.03220339 0.03389831
## [175] 0.04915254 0.03898305 0.03898305 0.04237288 0.04237288 0.04237288
## [181] 0.03389831 0.03559322 0.04237288 0.04067797 0.03898305 0.04237288
## [187] 0.04237288 0.03728814 0.04406780 0.04406780 0.03728814 0.03728814
## [193] 0.05084746 0.03559322 0.03728814 0.04067797 0.04237288 0.03898305
## [199] 0.03220339 0.04406780 0.04576271 0.04067797 0.04576271 0.03559322
## [205] 0.04067797 0.03559322 0.02881356 0.03898305 0.03559322 0.03728814
## [211] 0.03389831 0.04067797 0.04237288 0.03559322 0.03898305 0.03220339
## [217] 0.03389831 0.09661017 0.04067797 0.05084746 0.03220339 0.03389831
## [223] 0.03898305 0.04067797 0.03389831 0.04237288 0.03389831 0.04067797
## [229] 0.08983051 0.03898305 0.04237288 0.04745763 0.04915254 0.03898305
## [235] 0.04237288 0.04067797 0.03559322 0.03559322 0.03898305 0.03898305
## [241] 0.09322034 0.04067797 0.03728814 0.03050847 0.03728814 0.05084746
## [247] 0.03898305 0.04237288 0.03898305 0.04237288 0.03898305 0.03898305
## [253] 0.03728814 0.04067797 0.04406780 0.06440678 0.03898305 0.04067797
## [259] 0.03728814 0.05084746 0.03728814 0.04237288 0.04067797 0.04237288
## [265] 0.02881356 0.04067797 0.03728814 0.03728814 0.04237288 0.04067797
## [271] 0.04406780 0.06440678 0.03559322 0.04576271 0.04237288 0.12033898
## [277] 0.06610169 0.04406780 0.03389831 0.04237288 0.03559322 0.04067797
## [283] 0.04237288 0.03728814 0.03559322 0.03389831 0.03898305 0.05084746
## [289] 0.03389831 0.05762712 0.04576271 0.06779661 0.05254237 0.03728814
## [295] 0.03898305 0.04067797 0.04237288 0.04067797 0.03050847 0.04067797
## [301] 0.04237288 0.03559322 0.03559322 0.03559322 0.03728814 0.03559322
## [307] 0.04576271 0.05084746 0.04237288 0.04067797 0.04237288 0.03898305
## [313] 0.04067797 0.03559322 0.14237288 0.04406780 0.04406780 0.04406780
## [319] 0.03559322 0.03220339 0.03898305 0.03898305 0.04237288 0.03898305
## [325] 0.04745763 0.04406780 0.04576271 0.04237288 0.04576271 0.03728814
## [331] 0.04406780 0.06949153 0.04406780 0.14237288 0.03728814 0.04067797
## [337] 0.04067797 0.04237288 0.04067797 0.04406780 0.03898305 0.03728814
## [343] 0.04237288 0.03728814 0.03389831 0.04406780 0.04237288 0.03898305
## [349] 0.03898305 0.03220339 0.04915254 0.03559322 0.03389831 0.03728814
## [355] 0.03898305 0.04067797 0.08305085 0.04067797 0.04067797 0.04067797
## [361] 0.04576271 0.05254237 0.04237288 0.06610169 0.03728814 0.03559322
## [367] 0.03728814 0.04406780 0.05593220 0.03559322 0.03050847 0.06101695
## [373] 0.04067797 0.04406780 0.03559322 0.06949153 0.04576271 0.04406780
## [379] 0.03220339 0.03220339 0.08644068 0.03389831 0.03559322 0.06949153
## [385] 0.04067797 0.03728814 0.03728814 0.03898305 0.03050847 0.05423729
## [391] 0.05084746 0.03559322 0.03559322 0.03559322 0.03728814 0.04067797
## [397] 0.04067797 0.04237288 0.03728814 0.04067797 0.04406780 0.04067797
## [403] 0.04576271 0.03389831 0.04576271 0.03898305 0.04406780 0.03728814
## [409] 0.04406780 0.04237288 0.02881356 0.12203390 0.03220339 0.04576271
## [415] 0.04067797 0.04237288 0.04406780 0.03220339 0.03559322 0.03389831
## [421] 0.04406780 0.03898305 0.03898305 0.03050847 0.03898305 0.04406780
## [427] 0.03050847 0.11355932 0.03559322 0.04237288 0.04067797 0.04237288
## [433] 0.03389831 0.04406780 0.03898305 0.04915254 0.04745763 0.04237288
## [439] 0.03898305 0.03898305 0.03559322 0.04745763 0.03898305 0.04237288
## [445] 0.03559322 0.03728814 0.04237288 0.05593220 0.04067797 0.04406780
## [451] 0.03220339 0.03898305 0.03559322 0.03728814 0.04067797 0.05762712
## [457] 0.03898305 0.06949153 0.04067797 0.03559322 0.04576271 0.04237288
## [463] 0.03898305 0.04237288 0.04067797 0.04067797 0.04237288 0.03898305
## [469] 0.05084746 0.03728814 0.04406780 0.03389831 0.03559322 0.04067797
## [475] 0.03898305 0.04237288 0.03559322 0.04915254 0.04067797 0.04067797
## [481] 0.04237288 0.04067797 0.03728814 0.03898305 0.03220339 0.03559322
## [487] 0.04406780 0.04067797 0.03728814 0.03728814 0.03559322 0.04067797
## [493] 0.03728814 0.04406780 0.04915254 0.04237288 0.04406780 0.02881356
## [499] 0.03559322 0.04915254

6 SVM Polynomial BO

for(j in 1:500){
  
  set.seed(j)
  
  folds <- createFolds(y=factor(train.set$Group), k = 5, list = FALSE)
  train.set$fold <- folds
  
  
  # SVM with polynomial kernel, cost = 472.9585, degree = 1, gamma = 1 - the best polynomial kernel SVM selected by Bayesian optimisation (BO)
  
  CV.error<-NULL 
  
  for (i in 1:5) { 
    valid.data <- subset(train.set, fold == i)
    train.data <- subset(train.set, fold != i) 
    
    svmfit<-svm(Group~PC1+PC2,data = train.data, kernel="polynomial", cost = 472.9585, degree = 1, gamma = 1 )
    svm.y<-valid.data$Group
    svm.predy<-predict(svmfit, valid.data)
    
    ith.test.error<- mean(svm.y!=svm.predy) 
    CV.error<-c(CV.error,(nrow(valid.data)/nrow(train.set))*ith.test.error)  
  }
  
  SVM.P.BO.All<-c(SVM.P.BO.All, sum(CV.error))
  
}

SVM.P.BO.All
##   [1] 0.02711864 0.03220339 0.03559322 0.04237288 0.03050847 0.02881356
##   [7] 0.02711864 0.02881356 0.02881356 0.02881356 0.02542373 0.02711864
##  [13] 0.02711864 0.03220339 0.03220339 0.02881356 0.03220339 0.03050847
##  [19] 0.02542373 0.03220339 0.03050847 0.02881356 0.03559322 0.03559322
##  [25] 0.03220339 0.03559322 0.03050847 0.02881356 0.03220339 0.03050847
##  [31] 0.02711864 0.03050847 0.02881356 0.03389831 0.03220339 0.03220339
##  [37] 0.03220339 0.03389831 0.03220339 0.03220339 0.02711864 0.02542373
##  [43] 0.03389831 0.02372881 0.02881356 0.02711864 0.03050847 0.02711864
##  [49] 0.02881356 0.03050847 0.03050847 0.03220339 0.03220339 0.03389831
##  [55] 0.02711864 0.02542373 0.03559322 0.03389831 0.02711864 0.03220339
##  [61] 0.03389831 0.03220339 0.03050847 0.04067797 0.03050847 0.02881356
##  [67] 0.03389831 0.03559322 0.03728814 0.03728814 0.03389831 0.03389831
##  [73] 0.02203390 0.02881356 0.02881356 0.02881356 0.03728814 0.03220339
##  [79] 0.03050847 0.03050847 0.03220339 0.02881356 0.02711864 0.03220339
##  [85] 0.03050847 0.03389831 0.03050847 0.03559322 0.03559322 0.02881356
##  [91] 0.03220339 0.02542373 0.02711864 0.02881356 0.03220339 0.02711864
##  [97] 0.02542373 0.03050847 0.03050847 0.02542373 0.03220339 0.03389831
## [103] 0.03220339 0.03050847 0.03050847 0.02542373 0.03050847 0.02711864
## [109] 0.02542373 0.02881356 0.03050847 0.03220339 0.02881356 0.03050847
## [115] 0.03050847 0.03220339 0.03389831 0.02881356 0.03559322 0.02711864
## [121] 0.03220339 0.03050847 0.03389831 0.02711864 0.03220339 0.02881356
## [127] 0.02881356 0.03559322 0.03559322 0.03050847 0.02881356 0.02881356
## [133] 0.02711864 0.02372881 0.02881356 0.02881356 0.02711864 0.03220339
## [139] 0.02711864 0.03050847 0.02711864 0.02881356 0.03050847 0.02711864
## [145] 0.02881356 0.02711864 0.02542373 0.03050847 0.03050847 0.03559322
## [151] 0.02711864 0.02711864 0.03220339 0.03050847 0.03050847 0.03050847
## [157] 0.03220339 0.03220339 0.03220339 0.02542373 0.03220339 0.02711864
## [163] 0.03050847 0.03220339 0.03050847 0.02542373 0.02711864 0.02881356
## [169] 0.02372881 0.03559322 0.02881356 0.03220339 0.03050847 0.03050847
## [175] 0.03389831 0.03559322 0.03389831 0.03220339 0.03220339 0.03389831
## [181] 0.02542373 0.02711864 0.03389831 0.02711864 0.02881356 0.02542373
## [187] 0.02881356 0.03389831 0.03050847 0.03220339 0.03220339 0.03898305
## [193] 0.03728814 0.02881356 0.02881356 0.02542373 0.02881356 0.02542373
## [199] 0.02542373 0.03220339 0.03050847 0.03220339 0.03220339 0.02881356
## [205] 0.03050847 0.03220339 0.02711864 0.03728814 0.02711864 0.02711864
## [211] 0.02542373 0.03220339 0.03220339 0.02881356 0.02542373 0.03050847
## [217] 0.03050847 0.02881356 0.03389831 0.02711864 0.02881356 0.02711864
## [223] 0.02881356 0.03050847 0.02711864 0.03220339 0.03389831 0.03220339
## [229] 0.02711864 0.02372881 0.03050847 0.02542373 0.03220339 0.03389831
## [235] 0.03898305 0.03389831 0.02542373 0.03220339 0.03050847 0.02711864
## [241] 0.03559322 0.03050847 0.03050847 0.02881356 0.02711864 0.02711864
## [247] 0.03220339 0.02542373 0.02372881 0.03220339 0.03050847 0.02711864
## [253] 0.02881356 0.03220339 0.03220339 0.03559322 0.03050847 0.03220339
## [259] 0.02881356 0.03220339 0.03220339 0.03559322 0.02203390 0.03220339
## [265] 0.03389831 0.03050847 0.03050847 0.02711864 0.03220339 0.03220339
## [271] 0.03220339 0.02542373 0.03050847 0.02881356 0.03728814 0.03050847
## [277] 0.03050847 0.02372881 0.03220339 0.03220339 0.02881356 0.03050847
## [283] 0.02711864 0.02542373 0.03050847 0.02542373 0.03050847 0.03220339
## [289] 0.02711864 0.02711864 0.03728814 0.03728814 0.03050847 0.02881356
## [295] 0.03220339 0.03559322 0.03728814 0.03050847 0.02881356 0.03389831
## [301] 0.03220339 0.03389831 0.02203390 0.02881356 0.03220339 0.02881356
## [307] 0.02881356 0.03050847 0.03220339 0.02711864 0.03389831 0.03220339
## [313] 0.03389831 0.02881356 0.02881356 0.03050847 0.03728814 0.02711864
## [319] 0.02542373 0.02881356 0.03220339 0.03559322 0.03220339 0.02881356
## [325] 0.03050847 0.03220339 0.03559322 0.02881356 0.03050847 0.03050847
## [331] 0.02881356 0.02372881 0.03389831 0.03389831 0.02881356 0.03898305
## [337] 0.03050847 0.02711864 0.03050847 0.03220339 0.02881356 0.03220339
## [343] 0.02542373 0.03050847 0.02542373 0.02881356 0.02881356 0.02881356
## [349] 0.02881356 0.03389831 0.03389831 0.02881356 0.02881356 0.03728814
## [355] 0.02881356 0.03728814 0.02711864 0.03220339 0.02881356 0.02881356
## [361] 0.03559322 0.02711864 0.02711864 0.03389831 0.02542373 0.02711864
## [367] 0.03220339 0.03050847 0.02881356 0.03050847 0.02881356 0.02881356
## [373] 0.02881356 0.03898305 0.03220339 0.03898305 0.02711864 0.02542373
## [379] 0.02711864 0.02711864 0.02881356 0.02711864 0.03050847 0.02881356
## [385] 0.02372881 0.03559322 0.02881356 0.03559322 0.02542373 0.03050847
## [391] 0.02372881 0.02542373 0.03559322 0.03220339 0.02711864 0.02881356
## [397] 0.04067797 0.02881356 0.03220339 0.03220339 0.02881356 0.03728814
## [403] 0.03220339 0.02881356 0.03728814 0.03220339 0.03728814 0.03559322
## [409] 0.02881356 0.03389831 0.03050847 0.03220339 0.02711864 0.02881356
## [415] 0.03050847 0.03220339 0.03050847 0.02372881 0.02542373 0.02881356
## [421] 0.02711864 0.02881356 0.02542373 0.03050847 0.02542373 0.03728814
## [427] 0.03050847 0.03389831 0.02542373 0.02711864 0.02881356 0.03220339
## [433] 0.03559322 0.02881356 0.03220339 0.03220339 0.02711864 0.02881356
## [439] 0.02711864 0.02542373 0.03050847 0.03559322 0.02881356 0.03220339
## [445] 0.02711864 0.02881356 0.03050847 0.03220339 0.03220339 0.02711864
## [451] 0.02542373 0.02881356 0.03220339 0.03050847 0.03050847 0.02881356
## [457] 0.03220339 0.02542373 0.03050847 0.02711864 0.02881356 0.03220339
## [463] 0.02542373 0.03050847 0.03050847 0.03389831 0.02711864 0.03050847
## [469] 0.04067797 0.02372881 0.03389831 0.03220339 0.02711864 0.03050847
## [475] 0.02881356 0.02881356 0.03389831 0.03389831 0.03050847 0.03220339
## [481] 0.03220339 0.03559322 0.02711864 0.02881356 0.02542373 0.02372881
## [487] 0.02542373 0.02881356 0.03389831 0.03220339 0.02711864 0.03559322
## [493] 0.03389831 0.03050847 0.03220339 0.03389831 0.03050847 0.02711864
## [499] 0.03389831 0.03559322

7 SVM Radial GS

for(j in 1:500){
  
  set.seed(j)
  
  folds <- createFolds(y=factor(train.set$Group), k = 5, list = FALSE)
  train.set$fold <- folds
  
  
  # SVM with radial kernel, cost = 229815093, gamma = 0.0001234098 - the best radial kernel SVM selected by grid search (GS)
  
  CV.error<-NULL 
  
  for (i in 1:5) { 
    valid.data <- subset(train.set, fold == i)
    train.data <- subset(train.set, fold != i) 
    
    svmfit<-svm(Group~PC1+PC2,data = train.data, kernel="radial", cost = 229815093, gamma = 0.0001234098 )
    svm.y<-valid.data$Group
    svm.predy<-predict(svmfit, valid.data)
    
    ith.test.error<- mean(svm.y!=svm.predy) 
    CV.error<-c(CV.error,(nrow(valid.data)/nrow(train.set))*ith.test.error)  
  }
  
  SVM.R.GS.All<-c(SVM.R.GS.All, sum(CV.error))
  
}

SVM.R.GS.All
##   [1] 0.02711864 0.03389831 0.03728814 0.03898305 0.03559322 0.03389831
##   [7] 0.02881356 0.03389831 0.03559322 0.04067797 0.03050847 0.02881356
##  [13] 0.03220339 0.03389831 0.03389831 0.03389831 0.02881356 0.02881356
##  [19] 0.02711864 0.03389831 0.03389831 0.03559322 0.04237288 0.03389831
##  [25] 0.03050847 0.03898305 0.03220339 0.03050847 0.03898305 0.03220339
##  [31] 0.03050847 0.03220339 0.03220339 0.03728814 0.03898305 0.03389831
##  [37] 0.03389831 0.04067797 0.03220339 0.04067797 0.02881356 0.02542373
##  [43] 0.04067797 0.03050847 0.03220339 0.03220339 0.02542373 0.03220339
##  [49] 0.03559322 0.03559322 0.03728814 0.04237288 0.03898305 0.03728814
##  [55] 0.02881356 0.02542373 0.03559322 0.03559322 0.02711864 0.03898305
##  [61] 0.03220339 0.04237288 0.03898305 0.04237288 0.03559322 0.03220339
##  [67] 0.04406780 0.03559322 0.03389831 0.04237288 0.03389831 0.03389831
##  [73] 0.03559322 0.03389831 0.03728814 0.03559322 0.03050847 0.03389831
##  [79] 0.03389831 0.03559322 0.03050847 0.02542373 0.03220339 0.03220339
##  [85] 0.03389831 0.03389831 0.03728814 0.03728814 0.03898305 0.03559322
##  [91] 0.03559322 0.02711864 0.02881356 0.03050847 0.03220339 0.03050847
##  [97] 0.02372881 0.03559322 0.03050847 0.02711864 0.03389831 0.04237288
## [103] 0.03728814 0.03559322 0.03559322 0.02542373 0.03728814 0.03389831
## [109] 0.03728814 0.03898305 0.03728814 0.03389831 0.02711864 0.03389831
## [115] 0.03389831 0.03728814 0.03728814 0.03220339 0.04237288 0.02372881
## [121] 0.03728814 0.03898305 0.03559322 0.02881356 0.03389831 0.03728814
## [127] 0.03220339 0.03728814 0.04067797 0.03220339 0.03559322 0.03220339
## [133] 0.02881356 0.02372881 0.03559322 0.03559322 0.03050847 0.03220339
## [139] 0.03220339 0.03220339 0.02711864 0.03220339 0.02542373 0.03050847
## [145] 0.03389831 0.02711864 0.02372881 0.03050847 0.03898305 0.03050847
## [151] 0.03050847 0.03220339 0.04067797 0.03728814 0.03050847 0.03389831
## [157] 0.03050847 0.03220339 0.03050847 0.02711864 0.03050847 0.03898305
## [163] 0.03050847 0.03220339 0.03389831 0.02711864 0.02711864 0.03050847
## [169] 0.03050847 0.03728814 0.03389831 0.04067797 0.03389831 0.02881356
## [175] 0.03559322 0.03220339 0.03050847 0.03728814 0.02881356 0.03559322
## [181] 0.03220339 0.02881356 0.03220339 0.03728814 0.02881356 0.02711864
## [187] 0.03728814 0.03389831 0.03559322 0.04067797 0.03050847 0.04067797
## [193] 0.03389831 0.03559322 0.03389831 0.03389831 0.03220339 0.02881356
## [199] 0.02881356 0.03898305 0.02711864 0.04067797 0.03559322 0.02542373
## [205] 0.03220339 0.03559322 0.03050847 0.04067797 0.03389831 0.02542373
## [211] 0.02542373 0.03050847 0.03898305 0.03050847 0.02881356 0.03389831
## [217] 0.03389831 0.03220339 0.03559322 0.02881356 0.03389831 0.03050847
## [223] 0.03898305 0.02711864 0.03898305 0.03898305 0.03050847 0.02881356
## [229] 0.03389831 0.03050847 0.03559322 0.03220339 0.03898305 0.03389831
## [235] 0.04237288 0.03389831 0.02711864 0.02881356 0.02881356 0.03050847
## [241] 0.04067797 0.03389831 0.03389831 0.02881356 0.02881356 0.03728814
## [247] 0.03728814 0.03050847 0.02711864 0.03389831 0.02881356 0.03559322
## [253] 0.03389831 0.03389831 0.02881356 0.03728814 0.03050847 0.03559322
## [259] 0.04067797 0.03559322 0.03220339 0.03898305 0.02711864 0.03389831
## [265] 0.03050847 0.03220339 0.03050847 0.02711864 0.04067797 0.03728814
## [271] 0.03728814 0.02881356 0.03559322 0.03220339 0.03559322 0.03559322
## [277] 0.03220339 0.02881356 0.02881356 0.02881356 0.03728814 0.04067797
## [283] 0.02881356 0.03050847 0.04576271 0.03050847 0.03220339 0.03728814
## [289] 0.03050847 0.03050847 0.04237288 0.03728814 0.03559322 0.03220339
## [295] 0.03220339 0.03050847 0.04237288 0.03220339 0.02881356 0.03898305
## [301] 0.03050847 0.04067797 0.02542373 0.03389831 0.03559322 0.03220339
## [307] 0.03389831 0.03220339 0.03728814 0.03728814 0.04576271 0.03220339
## [313] 0.04067797 0.03389831 0.03559322 0.03389831 0.04067797 0.03389831
## [319] 0.02711864 0.03050847 0.02881356 0.04067797 0.03559322 0.03898305
## [325] 0.03220339 0.03389831 0.04067797 0.03220339 0.03559322 0.03389831
## [331] 0.03389831 0.02881356 0.04237288 0.03728814 0.03559322 0.03898305
## [337] 0.03559322 0.03220339 0.03220339 0.03559322 0.02711864 0.03728814
## [343] 0.03220339 0.03898305 0.03050847 0.02881356 0.02711864 0.03050847
## [349] 0.03559322 0.03559322 0.04237288 0.03389831 0.03728814 0.03220339
## [355] 0.03728814 0.04067797 0.03050847 0.03728814 0.04067797 0.03220339
## [361] 0.03050847 0.03898305 0.03559322 0.03389831 0.03220339 0.03728814
## [367] 0.03559322 0.02881356 0.03898305 0.03050847 0.03050847 0.03389831
## [373] 0.03220339 0.04576271 0.03559322 0.04406780 0.03220339 0.02542373
## [379] 0.03389831 0.03220339 0.03559322 0.02711864 0.03050847 0.03220339
## [385] 0.02372881 0.03898305 0.03389831 0.04067797 0.02711864 0.02711864
## [391] 0.02711864 0.02711864 0.03389831 0.03898305 0.03050847 0.03220339
## [397] 0.03389831 0.03220339 0.02881356 0.02881356 0.03559322 0.04406780
## [403] 0.03728814 0.03050847 0.04237288 0.03559322 0.03389831 0.03050847
## [409] 0.03220339 0.02881356 0.03220339 0.04237288 0.02881356 0.03898305
## [415] 0.03389831 0.03728814 0.03050847 0.02881356 0.03389831 0.03050847
## [421] 0.02711864 0.03559322 0.03898305 0.02711864 0.03389831 0.03559322
## [427] 0.03559322 0.04745763 0.03728814 0.02881356 0.02711864 0.03389831
## [433] 0.03728814 0.03389831 0.03220339 0.03728814 0.03898305 0.03728814
## [439] 0.02881356 0.02881356 0.03050847 0.03559322 0.03559322 0.03050847
## [445] 0.03050847 0.02542373 0.02372881 0.02881356 0.03559322 0.03389831
## [451] 0.02542373 0.02881356 0.04067797 0.03559322 0.03898305 0.03728814
## [457] 0.03389831 0.03220339 0.03050847 0.02881356 0.03898305 0.04067797
## [463] 0.03559322 0.03728814 0.03220339 0.02711864 0.03220339 0.03559322
## [469] 0.03898305 0.02542373 0.03389831 0.03559322 0.02881356 0.03898305
## [475] 0.03220339 0.03389831 0.03220339 0.03898305 0.03389831 0.03050847
## [481] 0.03728814 0.03389831 0.03389831 0.03220339 0.02372881 0.03389831
## [487] 0.03220339 0.03050847 0.03050847 0.03050847 0.03559322 0.03898305
## [493] 0.02881356 0.03050847 0.03728814 0.03389831 0.03389831 0.03559322
## [499] 0.03389831 0.03559322

8 SVM Radial BO

for(j in 1:500){
  
  set.seed(j)
  
  folds <- createFolds(y=factor(train.set$Group), k = 5, list = FALSE)
  train.set$fold <- folds
  
  
  # SVM with radial kernel, cost = 1538065, gamma = 0.02650774 - the best radial kernel SVM selected by Bayesian optimisation (BO)
  
  CV.error<-NULL 
  
  for (i in 1:5) { 
    valid.data <- subset(train.set, fold == i)
    train.data <- subset(train.set, fold != i) 
    
    svmfit<-svm(Group~PC1+PC2,data = train.data, kernel="radial", cost = 1538065, gamma = 0.02650774)
    svm.y<-valid.data$Group
    svm.predy<-predict(svmfit, valid.data)
    
    ith.test.error<- mean(svm.y!=svm.predy) 
    CV.error<-c(CV.error,(nrow(valid.data)/nrow(train.set))*ith.test.error)  
  }
  
  SVM.R.BO.All<-c(SVM.R.BO.All, sum(CV.error))
  
}

SVM.R.BO.All
##   [1] 0.02203390 0.03389831 0.02542373 0.03559322 0.03220339 0.03389831
##   [7] 0.02881356 0.02881356 0.02881356 0.03050847 0.02542373 0.02711864
##  [13] 0.02542373 0.02881356 0.03389831 0.03220339 0.03050847 0.03050847
##  [19] 0.02711864 0.03050847 0.03050847 0.02711864 0.03050847 0.02881356
##  [25] 0.03050847 0.02542373 0.02372881 0.03050847 0.03050847 0.02881356
##  [31] 0.02542373 0.02881356 0.02203390 0.02711864 0.03559322 0.03050847
##  [37] 0.02881356 0.02711864 0.03220339 0.02881356 0.02542373 0.02711864
##  [43] 0.03050847 0.02542373 0.03050847 0.02542373 0.02542373 0.03050847
##  [49] 0.02542373 0.03050847 0.02881356 0.03389831 0.03389831 0.03389831
##  [55] 0.02372881 0.02711864 0.02881356 0.03389831 0.02372881 0.03050847
##  [61] 0.04067797 0.03389831 0.02372881 0.04576271 0.03050847 0.02542373
##  [67] 0.03898305 0.03220339 0.03559322 0.03389831 0.02881356 0.02881356
##  [73] 0.02033898 0.03220339 0.03389831 0.02881356 0.03389831 0.03050847
##  [79] 0.03050847 0.02881356 0.02711864 0.02881356 0.02711864 0.03559322
##  [85] 0.03220339 0.03898305 0.02542373 0.03389831 0.03559322 0.02372881
##  [91] 0.03050847 0.02711864 0.03220339 0.02711864 0.03220339 0.02203390
##  [97] 0.02203390 0.04067797 0.02881356 0.02372881 0.03220339 0.04406780
## [103] 0.02372881 0.03050847 0.02881356 0.02033898 0.03389831 0.03220339
## [109] 0.02711864 0.03220339 0.02711864 0.02711864 0.02711864 0.03389831
## [115] 0.02711864 0.03050847 0.03559322 0.02881356 0.03220339 0.02881356
## [121] 0.04237288 0.04406780 0.03050847 0.02711864 0.03389831 0.03559322
## [127] 0.02542373 0.03050847 0.03559322 0.03050847 0.02881356 0.02542373
## [133] 0.03050847 0.02542373 0.02711864 0.04237288 0.02711864 0.02372881
## [139] 0.02542373 0.03220339 0.02203390 0.02881356 0.02542373 0.02542373
## [145] 0.02711864 0.02372881 0.03389831 0.02881356 0.03050847 0.02881356
## [151] 0.02881356 0.02542373 0.03389831 0.03050847 0.02711864 0.03389831
## [157] 0.02542373 0.03220339 0.03050847 0.02542373 0.02711864 0.02711864
## [163] 0.03220339 0.03559322 0.02881356 0.02881356 0.02881356 0.03220339
## [169] 0.02711864 0.03559322 0.04067797 0.03220339 0.02881356 0.03220339
## [175] 0.03728814 0.03559322 0.02881356 0.03559322 0.02881356 0.03898305
## [181] 0.02542373 0.02203390 0.03050847 0.02542373 0.02711864 0.03220339
## [187] 0.02711864 0.03050847 0.02881356 0.03559322 0.03220339 0.03728814
## [193] 0.03220339 0.02542373 0.02372881 0.02881356 0.02711864 0.02711864
## [199] 0.02881356 0.02372881 0.02711864 0.02881356 0.02711864 0.02372881
## [205] 0.03050847 0.02881356 0.02542373 0.03728814 0.02542373 0.02542373
## [211] 0.02881356 0.03050847 0.03559322 0.03050847 0.03389831 0.02542373
## [217] 0.03220339 0.03728814 0.03220339 0.02881356 0.02711864 0.02711864
## [223] 0.03050847 0.03050847 0.02881356 0.02881356 0.02542373 0.02542373
## [229] 0.03559322 0.02881356 0.03389831 0.02372881 0.03050847 0.02881356
## [235] 0.03050847 0.02881356 0.02881356 0.02881356 0.02881356 0.02711864
## [241] 0.03220339 0.02711864 0.03389831 0.03220339 0.03220339 0.03050847
## [247] 0.03389831 0.03050847 0.02372881 0.02542373 0.02203390 0.03220339
## [253] 0.03559322 0.03559322 0.02542373 0.03220339 0.03050847 0.03728814
## [259] 0.02711864 0.03220339 0.02881356 0.03898305 0.02542373 0.02542373
## [265] 0.02711864 0.03050847 0.02881356 0.03389831 0.03898305 0.02711864
## [271] 0.03220339 0.02033898 0.02711864 0.03220339 0.03220339 0.03050847
## [277] 0.02881356 0.02372881 0.02542373 0.02881356 0.02711864 0.02881356
## [283] 0.03050847 0.02881356 0.03389831 0.02203390 0.02881356 0.03728814
## [289] 0.02033898 0.02711864 0.04237288 0.03898305 0.03050847 0.02372881
## [295] 0.02881356 0.03559322 0.03220339 0.02711864 0.02203390 0.02881356
## [301] 0.02542373 0.03050847 0.02711864 0.03220339 0.03389831 0.03389831
## [307] 0.02881356 0.03389831 0.03050847 0.02711864 0.03728814 0.02881356
## [313] 0.03728814 0.02711864 0.02881356 0.03559322 0.03559322 0.02881356
## [319] 0.02881356 0.02881356 0.02881356 0.03220339 0.04067797 0.03728814
## [325] 0.03050847 0.02881356 0.03728814 0.03559322 0.02881356 0.03220339
## [331] 0.03220339 0.02542373 0.02881356 0.03050847 0.03389831 0.04406780
## [337] 0.03728814 0.03220339 0.02881356 0.03050847 0.03220339 0.03389831
## [343] 0.02542373 0.02542373 0.02881356 0.03050847 0.02711864 0.03050847
## [349] 0.03050847 0.02711864 0.03898305 0.02372881 0.03050847 0.02881356
## [355] 0.02881356 0.03220339 0.02542373 0.02372881 0.02881356 0.03050847
## [361] 0.02881356 0.02881356 0.02711864 0.03728814 0.03050847 0.02203390
## [367] 0.03220339 0.03389831 0.03728814 0.02881356 0.02372881 0.02881356
## [373] 0.03389831 0.04067797 0.02711864 0.03220339 0.02711864 0.02881356
## [379] 0.02542373 0.01864407 0.03220339 0.02711864 0.02881356 0.02542373
## [385] 0.02372881 0.03220339 0.02711864 0.03220339 0.03050847 0.03050847
## [391] 0.01864407 0.02881356 0.03389831 0.02711864 0.02881356 0.03389831
## [397] 0.03220339 0.02711864 0.03220339 0.03728814 0.03559322 0.02881356
## [403] 0.03728814 0.02033898 0.02711864 0.03050847 0.04576271 0.03050847
## [409] 0.02542373 0.04067797 0.02711864 0.03220339 0.02711864 0.03728814
## [415] 0.03220339 0.03898305 0.03559322 0.02033898 0.02711864 0.03050847
## [421] 0.02711864 0.03050847 0.03220339 0.02711864 0.03220339 0.03389831
## [427] 0.02881356 0.04067797 0.03050847 0.02542373 0.02711864 0.03050847
## [433] 0.03220339 0.03050847 0.03050847 0.03220339 0.03559322 0.02542373
## [439] 0.02881356 0.03050847 0.03050847 0.03559322 0.02033898 0.03050847
## [445] 0.02542373 0.02542373 0.03220339 0.02881356 0.03559322 0.03389831
## [451] 0.02711864 0.02711864 0.03728814 0.03220339 0.03050847 0.02881356
## [457] 0.03728814 0.02372881 0.02711864 0.02881356 0.03389831 0.03559322
## [463] 0.02711864 0.03389831 0.02881356 0.02542373 0.03050847 0.02881356
## [469] 0.04067797 0.01864407 0.02542373 0.03220339 0.02542373 0.03220339
## [475] 0.03050847 0.03050847 0.03389831 0.03050847 0.02711864 0.02881356
## [481] 0.03050847 0.03220339 0.02542373 0.03389831 0.02372881 0.02372881
## [487] 0.03220339 0.02881356 0.03898305 0.03389831 0.02711864 0.03050847
## [493] 0.02711864 0.03728814 0.03559322 0.03559322 0.02881356 0.02372881
## [499] 0.02542373 0.03559322

9 Boxplots

QDA.<-data.frame(Classifier="QDA.", CV.Error.Rate=QDA.All)
SVM.L.GS.<-data.frame(Classifier="SVM.L.GS.", CV.Error.Rate=SVM.L.GS.All)
SVM.L.BO.<-data.frame(Classifier="SVM.L.BO.", CV.Error.Rate=SVM.L.BO.All)
SVM.P.GS.<-data.frame(Classifier="SVM.P.GS.", CV.Error.Rate=SVM.P.GS.All)
SVM.P.BO.<-data.frame(Classifier="SVM.P.BO.", CV.Error.Rate=SVM.P.BO.All)  
SVM.R.GS.<-data.frame(Classifier="SVM.R.GS.", CV.Error.Rate=SVM.R.GS.All)
SVM.R.BO.<-data.frame(Classifier="SVM.R.BO.", CV.Error.Rate=SVM.R.BO.All)

df<-rbind(QDA., SVM.L.GS., SVM.L.BO., SVM.P.GS., SVM.P.BO., SVM.R.GS., SVM.R.BO.)

ggplot(df, aes(x=Classifier, y=CV.Error.Rate)) +
  geom_boxplot(color = "green") +
  geom_jitter(alpha = 0.1) +
  ylab("CV Error Rate")

summary(QDA.All)
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
## 0.04068 0.05085 0.05254 0.05290 0.05593 0.06610
quantile(QDA.All, probs=c(0.025, 0.975))
##       2.5%      97.5% 
## 0.04576271 0.06101695
sd(QDA.All)
## [1] 0.003817152
summary(SVM.L.GS.All)
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
## 0.02712 0.03220 0.03559 0.03535 0.03729 0.04915
quantile(SVM.L.GS.All, probs=c(0.025, 0.975))
##       2.5%      97.5% 
## 0.02881356 0.04406780
sd(SVM.L.GS.All)
## [1] 0.003651733
summary(SVM.L.BO.All)
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
## 0.02034 0.02881 0.03051 0.03154 0.03390 0.04915
quantile(SVM.L.BO.All , probs=c(0.025, 0.975))
##       2.5%      97.5% 
## 0.02372881 0.04067797
sd(SVM.L.BO.All)
## [1] 0.004253696
summary(SVM.P.GS.All)
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
## 0.02881 0.03729 0.04068 0.04314 0.04407 0.14240
quantile(SVM.P.GS.All , probs=c(0.025, 0.975))
##       2.5%      97.5% 
## 0.03220339 0.08983051
sd(SVM.P.GS.All)
## [1] 0.01386288
summary(SVM.P.BO.All)
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
## 0.02203 0.02839 0.03051 0.03039 0.03220 0.04237
quantile(SVM.P.BO.All , probs=c(0.025, 0.975))
##       2.5%      97.5% 
## 0.02372881 0.03728814
sd(SVM.P.BO.All)
## [1] 0.003461828
summary(SVM.R.GS.All)
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
## 0.02373 0.03051 0.03390 0.03362 0.03729 0.04746
quantile(SVM.R.GS.All , probs=c(0.025, 0.975))
##       2.5%      97.5% 
## 0.02542373 0.04237288
sd(SVM.R.GS.All)
## [1] 0.004460009
summary(SVM.R.BO.All)
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
## 0.01864 0.02712 0.02881 0.03008 0.03220 0.04576
quantile(SVM.R.BO.All , probs=c(0.025, 0.975))
##       2.5%      97.5% 
## 0.02203390 0.04067797
sd(SVM.R.BO.All)
## [1] 0.004573145
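The repeated summary()/quantile()/sd() calls above can also be condensed into a single table using the df already built for the boxplots; a minimal sketch:

# One row per classifier: mean, sd and selected quantiles of the 500 CV error rates
round(do.call(rbind, lapply(split(df$CV.Error.Rate, df$Classifier),
  function(x) c(mean = mean(x), sd = sd(x), quantile(x, c(0.025, 0.5, 0.975))))), 5)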