library(readr)   # read_csv()
ArcLakeGroupSummary <- read_csv("M:/ArcLakeGroupSummary.csv")
dundeedata <- read_csv("M:/dundeedata.csv")

# Rename the GloboLID column to GloboLakes_ID so the two tables merge on a common key
colnames(dundeedata)[1] <- "GloboLakes_ID"
Data <- merge(ArcLakeGroupSummary, dundeedata, by = "GloboLakes_ID", all = TRUE)
Data <- subset(Data, Group != "NA")   # back to the original 732 rows, now with extra columns
Data$Group <- as.factor(Data$Group)
To use each model, I first prepare a suitable data frame, split it into training and test sets, and then split the training set into 5 folds for cross-validation.
Data2 <- data.frame(Data[, c("Group", "Latitude", "Longitude", "OverallAvg")])

library(caret)                  # createDataPartition(), createFolds()
library(e1071)                  # svm(), used throughout below
library(rBayesianOptimization)  # BayesianOptimization()

# Stratified split of the full data set into training (80%) and test (20%) sets
set.seed(234)
train.index <- createDataPartition(Data2$Group, p = 0.8, list = FALSE)
train.set <- Data2[train.index, ]
test.set  <- Data2[-train.index, ]

# Stratified assignment of the training rows to 5 cross-validation folds
folds <- createFolds(y = factor(train.set$Group), k = 5, list = FALSE)
train.set$fold <- folds
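As a quick optional check (not part of the original output), the stratification can be verified by comparing the class proportions within each fold; they should be roughly equal:

# Optional check: class proportions should be similar across the 5 folds
round(prop.table(table(train.set$Group, train.set$fold), margin = 2), 2)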
Apart from exhaustively trying every combination on a grid, or randomly sweeping over a hand-picked set of values we think might perform well, a third way of choosing hyperparameters is Bayesian optimisation: a surrogate model of the cross-validated error is built as a function of the hyperparameters, and an acquisition function (here the upper confidence bound) decides which combination to evaluate next. I use Bayesian optimisation below for each kernel; a hedged grid-search sketch follows for contrast.
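For contrast only, the exhaustive option might look like the following sketch, which uses e1071::tune.svm with its default cross-validation to grid-search the cost of a linear kernel; the grid values are illustrative and not taken from the original analysis.

# Hypothetical grid search over cost for the linear kernel (illustrative grid)
grid_res <- e1071::tune.svm(Group ~ Longitude + Latitude + OverallAvg,
                            data = train.set, kernel = "linear",
                            cost = exp(seq(-5, 20, by = 2.5)))
summary(grid_res)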
# Linear kernel: tune the cost parameter (searched on the log scale)
# Score function for Bayesian optimisation: weighted 5-fold CV error of a
# linear-kernel SVM at a given cost. BayesianOptimization maximises Score,
# so the negative CV error is returned.
svm_fit_bayes <- function(logCost) {
  CV.error <- NULL
  for (i in 1:5) {
    valid.data <- subset(train.set, fold == i)
    train.data <- subset(train.set, fold != i)
    svmfit <- svm(Group ~ Longitude + Latitude + OverallAvg, data = train.data,
                  kernel = "linear", cost = exp(logCost), scale = FALSE)
    svm.y     <- valid.data$Group
    svm.predy <- predict(svmfit, valid.data)
    ith.test.error <- mean(svm.y != svm.predy)
    CV.error <- c(CV.error, (nrow(valid.data) / nrow(train.set)) * ith.test.error)
  }
  list(Score = -sum(CV.error), pred = 0)
}
set.seed(234)
OPT_Res <- BayesianOptimization(svm_fit_bayes,
                                bounds = list(logCost = c(-5, 20)),
                                init_grid_dt = NULL, init_points = 50,
                                n_iter = 20, acq = "ucb", kappa = 2.576,
                                eps = 0, verbose = TRUE)
## elapsed = 16.50 Round = 1 logCost = 13.6405 Value = -0.1119
## elapsed = 16.81 Round = 2 logCost = 14.5428 Value = -0.1102
## elapsed = 0.11 Round = 3 logCost = -4.4991 Value = -0.1102
## elapsed = 16.81 Round = 4 logCost = 14.4021 Value = -0.1102
## elapsed = 0.15 Round = 5 logCost = -3.3272 Value = -0.0746
## elapsed = 15.11 Round = 6 logCost = 11.1199 Value = -0.1475
## elapsed = 19.91 Round = 7 logCost = 18.2346 Value = -0.1305
## elapsed = 16.52 Round = 8 logCost = 12.9411 Value = -0.1271
## elapsed = 19.35 Round = 9 logCost = 18.1934 Value = -0.1220
## elapsed = 0.97 Round = 10 logCost = 2.1058 Value = -0.0576
## elapsed = 13.38 Round = 11 logCost = 8.8931 Value = -0.0661
## elapsed = 13.95 Round = 12 logCost = 8.6925 Value = -0.0797
## elapsed = 14.31 Round = 13 logCost = 9.5712 Value = -0.0627
## elapsed = 14.48 Round = 14 logCost = 9.5747 Value = -0.0712
## elapsed = 0.11 Round = 15 logCost = -4.9700 Value = -0.1390
## elapsed = 9.45 Round = 16 logCost = 6.0279 Value = -0.1220
## elapsed = 1.36 Round = 17 logCost = 2.8288 Value = -0.0593
## elapsed = 15.82 Round = 18 logCost = 13.5004 Value = -0.1339
## elapsed = 0.13 Round = 19 logCost = -1.5418 Value = -0.0644
## elapsed = 17.97 Round = 20 logCost = 16.7944 Value = -0.1169
## elapsed = 13.09 Round = 21 logCost = 8.0767 Value = -0.1542
## elapsed = 13.45 Round = 22 logCost = 9.4776 Value = -0.0610
## elapsed = 17.45 Round = 23 logCost = 16.6300 Value = -0.1271
## elapsed = 13.83 Round = 24 logCost = 10.4356 Value = -0.1254
## elapsed = 11.69 Round = 25 logCost = 7.2445 Value = -0.0729
## elapsed = 4.40 Round = 26 logCost = 4.3700 Value = -0.0576
## elapsed = 14.75 Round = 27 logCost = 12.4167 Value = -0.1153
## elapsed = 0.27 Round = 28 logCost = -0.2072 Value = -0.0610
## elapsed = 16.82 Round = 29 logCost = 15.9732 Value = -0.1153
## elapsed = 17.47 Round = 30 logCost = 17.0627 Value = -0.1169
## elapsed = 13.85 Round = 31 logCost = 10.4807 Value = -0.1153
## elapsed = 1.12 Round = 32 logCost = 1.3078 Value = -0.0593
## elapsed = 0.19 Round = 33 logCost = -0.5958 Value = -0.0610
## elapsed = 15.95 Round = 34 logCost = 12.6207 Value = -0.1254
## elapsed = 14.02 Round = 35 logCost = 8.3371 Value = -0.1390
## elapsed = 15.20 Round = 36 logCost = 12.2033 Value = -0.1356
## elapsed = 15.02 Round = 37 logCost = 12.5207 Value = -0.1271
## elapsed = 0.20 Round = 38 logCost = -1.1632 Value = -0.0627
## elapsed = 12.34 Round = 39 logCost = 7.6583 Value = -0.1305
## elapsed = 3.49 Round = 40 logCost = 3.8680 Value = -0.0627
## elapsed = 13.72 Round = 41 logCost = 9.5338 Value = -0.0661
## elapsed = 18.25 Round = 42 logCost = 17.6858 Value = -0.1237
## elapsed = 17.94 Round = 43 logCost = 16.1313 Value = -0.1254
## elapsed = 0.56 Round = 44 logCost = 0.8071 Value = -0.0576
## elapsed = 14.70 Round = 45 logCost = 11.4682 Value = -0.1593
## elapsed = 16.31 Round = 46 logCost = 14.4283 Value = -0.1186
## elapsed = 0.69 Round = 47 logCost = 1.0083 Value = -0.0576
## elapsed = 13.72 Round = 48 logCost = 10.7092 Value = -0.1695
## elapsed = 13.73 Round = 49 logCost = 9.8284 Value = -0.0678
## elapsed = 0.14 Round = 50 logCost = -1.8423 Value = -0.0627
## elapsed = 0.12 Round = 51 logCost = -2.6935 Value = -0.0695
## elapsed = 0.09 Round = 52 logCost = -3.7109 Value = -0.0763
## elapsed = 0.12 Round = 53 logCost = -3.1431 Value = -0.0746
## elapsed = 0.09 Round = 54 logCost = -4.4830 Value = -0.1085
## elapsed = 0.11 Round = 55 logCost = -3.4985 Value = -0.0763
## elapsed = 0.09 Round = 56 logCost = -4.9310 Value = -0.1356
## elapsed = 0.33 Round = 57 logCost = 0.1372 Value = -0.0593
## elapsed = 0.92 Round = 58 logCost = 1.5543 Value = -0.0576
## elapsed = 0.56 Round = 59 logCost = 0.8579 Value = -0.0576
## elapsed = 0.91 Round = 60 logCost = 1.6134 Value = -0.0559
## elapsed = 1.58 Round = 61 logCost = 3.1338 Value = -0.0610
## elapsed = 0.18 Round = 62 logCost = -0.7877 Value = -0.0627
## elapsed = 0.81 Round = 63 logCost = 1.2517 Value = -0.0610
## elapsed = 0.72 Round = 64 logCost = 1.0299 Value = -0.0576
## elapsed = 0.21 Round = 65 logCost = -0.3874 Value = -0.0627
## elapsed = 0.75 Round = 66 logCost = 1.8455 Value = -0.0559
## elapsed = 0.78 Round = 67 logCost = 1.7514 Value = -0.0559
## elapsed = 0.87 Round = 68 logCost = 1.2016 Value = -0.0610
## elapsed = 0.55 Round = 69 logCost = 0.7400 Value = -0.0593
## elapsed = 1.59 Round = 70 logCost = 3.0187 Value = -0.0593
##
## Best Parameters Found:
## Round = 60 logCost = 1.6134 Value = -0.0559
OPT_Res$Best_Par
## logCost
## 1.613397
as.numeric(exp(OPT_Res$Best_Par["logCost"]))
## [1] 5.019835
# Re-run the 5-fold CV at the optimal cost to confirm the reported best Value
CV.error <- NULL
for (i in 1:5) {
  valid.data <- subset(train.set, fold == i)
  train.data <- subset(train.set, fold != i)
  svmfit <- svm(Group ~ Longitude + Latitude + OverallAvg, data = train.data,
                kernel = "linear", cost = exp(OPT_Res$Best_Par["logCost"]),
                scale = FALSE)
  svm.y     <- valid.data$Group
  svm.predy <- predict(svmfit, valid.data)
  ith.test.error <- mean(svm.y != svm.predy)
  CV.error <- c(CV.error, (nrow(valid.data) / nrow(train.set)) * ith.test.error)
}
sum(CV.error)
## [1] 0.0559322
# Polynomial kernel: tune cost, gamma (both searched on the log scale) and degree
svm_fit_bayes <- function(logCost, logGamma, Degree) {
  CV.error <- NULL
  for (i in 1:5) {
    valid.data <- subset(train.set, fold == i)
    train.data <- subset(train.set, fold != i)
    svmfit <- svm(Group ~ Longitude + Latitude + OverallAvg, data = train.data,
                  kernel = "polynomial", cost = exp(logCost),
                  gamma = exp(logGamma), degree = Degree)
    svm.y     <- valid.data$Group
    svm.predy <- predict(svmfit, valid.data)
    ith.test.error <- mean(svm.y != svm.predy)
    CV.error <- c(CV.error, (nrow(valid.data) / nrow(train.set)) * ith.test.error)
  }
  list(Score = -sum(CV.error), pred = 0)
}
set.seed(234)
OPT_Res <- BayesianOptimization(svm_fit_bayes,
                                bounds = list(logCost = c(-5, 20),
                                              logGamma = c(-9, 5),
                                              Degree = c(1L, 5L)),
                                init_grid_dt = NULL, init_points = 50,
                                n_iter = 20, acq = "ucb", kappa = 2.576,
                                eps = 0, verbose = TRUE)
## elapsed = 9.78 Round = 1 logCost = 13.6405 logGamma = -0.1100 Degree = 4.0000 Value = -0.0949
## elapsed = 0.06 Round = 2 logCost = 14.5428 logGamma = -3.6803 Degree = 4.0000 Value = -0.1034
## elapsed = 0.09 Round = 3 logCost = -4.4991 logGamma = -6.8452 Degree = 2.0000 Value = -0.6678
## elapsed = 9.19 Round = 4 logCost = 14.4021 logGamma = 2.3082 Degree = 3.0000 Value = -0.1729
## elapsed = 0.09 Round = 5 logCost = -3.3272 logGamma = -7.8940 Degree = 2.0000 Value = -0.6678
## elapsed = 0.11 Round = 6 logCost = 11.1199 logGamma = -6.9799 Degree = 5.0000 Value = -0.6678
## elapsed = 0.07 Round = 7 logCost = 18.2346 logGamma = -7.7917 Degree = 2.0000 Value = -0.0729
## elapsed = 0.13 Round = 8 logCost = 12.9411 logGamma = -2.5374 Degree = 2.0000 Value = -0.0610
## elapsed = 16.48 Round = 9 logCost = 18.1934 logGamma = 2.3218 Degree = 2.0000 Value = -0.1881
## elapsed = 0.06 Round = 10 logCost = 2.1058 logGamma = 0.5545 Degree = 3.0000 Value = -0.0542
## elapsed = 0.08 Round = 11 logCost = 8.8931 logGamma = -3.8455 Degree = 4.0000 Value = -0.3814
## elapsed = 0.06 Round = 12 logCost = 8.6925 logGamma = -2.1492 Degree = 2.0000 Value = -0.0729
## elapsed = 0.09 Round = 13 logCost = 9.5712 logGamma = -5.4319 Degree = 3.0000 Value = -0.4525
## elapsed = 9.58 Round = 14 logCost = 9.5747 logGamma = 3.1058 Degree = 3.0000 Value = -0.1458
## elapsed = 0.11 Round = 15 logCost = -4.9700 logGamma = -2.5619 Degree = 2.0000 Value = -0.6678
## elapsed = 0.05 Round = 16 logCost = 6.0279 logGamma = -3.4814 Degree = 1.0000 Value = -0.0525
## elapsed = 0.08 Round = 17 logCost = 2.8288 logGamma = 1.6296 Degree = 2.0000 Value = -0.0746
## elapsed = 0.48 Round = 18 logCost = 13.5004 logGamma = -1.9421 Degree = 2.0000 Value = -0.0695
## elapsed = 0.06 Round = 19 logCost = -1.5418 logGamma = 0.3706 Degree = 4.0000 Value = -0.1000
## elapsed = 8.36 Round = 20 logCost = 16.7944 logGamma = -0.5696 Degree = 3.0000 Value = -0.0898
## elapsed = 9.77 Round = 21 logCost = 8.0767 logGamma = 4.0595 Degree = 4.0000 Value = -0.1000
## elapsed = 0.06 Round = 22 logCost = 9.4776 logGamma = -4.1480 Degree = 3.0000 Value = -0.1492
## elapsed = 8.25 Round = 23 logCost = 16.6300 logGamma = -0.8016 Degree = 3.0000 Value = -0.0644
## elapsed = 0.08 Round = 24 logCost = 10.4356 logGamma = -4.6248 Degree = 3.0000 Value = -0.1695
## elapsed = 0.11 Round = 25 logCost = 7.2445 logGamma = -8.0562 Degree = 4.0000 Value = -0.6678
## elapsed = 0.09 Round = 26 logCost = 4.3700 logGamma = -3.6790 Degree = 2.0000 Value = -0.2661
## elapsed = 0.61 Round = 27 logCost = 12.4167 logGamma = -1.3037 Degree = 4.0000 Value = -0.0746
## elapsed = 0.10 Round = 28 logCost = -0.2072 logGamma = -3.5495 Degree = 4.0000 Value = -0.6678
## elapsed = 0.04 Round = 29 logCost = 15.9732 logGamma = -6.9054 Degree = 2.0000 Value = -0.0797
## elapsed = 0.06 Round = 30 logCost = 17.0627 logGamma = -7.2907 Degree = 2.0000 Value = -0.0746
## elapsed = 0.75 Round = 31 logCost = 10.4807 logGamma = -0.7788 Degree = 3.0000 Value = -0.0559
## elapsed = 0.07 Round = 32 logCost = 1.3078 logGamma = -1.4961 Degree = 2.0000 Value = -0.1576
## elapsed = 0.09 Round = 33 logCost = -0.5958 logGamma = -4.3707 Degree = 4.0000 Value = -0.6678
## elapsed = 10.25 Round = 34 logCost = 12.6207 logGamma = 2.8276 Degree = 5.0000 Value = -0.1085
## elapsed = 9.33 Round = 35 logCost = 8.3371 logGamma = 4.0253 Degree = 3.0000 Value = -0.1847
## elapsed = 10.14 Round = 36 logCost = 12.2033 logGamma = 0.0287 Degree = 5.0000 Value = -0.1034
## elapsed = 0.09 Round = 37 logCost = 12.5207 logGamma = -4.9540 Degree = 4.0000 Value = -0.4119
## elapsed = 0.10 Round = 38 logCost = -1.1632 logGamma = 4.3850 Degree = 2.0000 Value = -0.0593
## elapsed = 0.09 Round = 39 logCost = 7.6583 logGamma = -4.3597 Degree = 4.0000 Value = -0.5508
## elapsed = 0.29 Round = 40 logCost = 3.8680 logGamma = 2.5258 Degree = 2.0000 Value = -0.0644
## elapsed = 11.04 Round = 41 logCost = 9.5338 logGamma = 2.9501 Degree = 4.0000 Value = -0.1034
## elapsed = 9.70 Round = 42 logCost = 17.6858 logGamma = 2.9679 Degree = 5.0000 Value = -0.1831
## elapsed = 0.08 Round = 43 logCost = 16.1313 logGamma = -6.8699 Degree = 3.0000 Value = -0.3102
## elapsed = 0.09 Round = 44 logCost = 0.8071 logGamma = -6.9476 Degree = 4.0000 Value = -0.6678
## elapsed = 0.08 Round = 45 logCost = 11.4682 logGamma = -1.7771 Degree = 5.0000 Value = -0.0746
## elapsed = 9.91 Round = 46 logCost = 14.4283 logGamma = 4.9103 Degree = 5.0000 Value = -0.1254
## elapsed = 0.11 Round = 47 logCost = 1.0083 logGamma = -3.7983 Degree = 2.0000 Value = -0.6102
## elapsed = 0.06 Round = 48 logCost = 10.7092 logGamma = -3.7880 Degree = 2.0000 Value = -0.0797
## elapsed = 1.72 Round = 49 logCost = 9.8284 logGamma = 0.6458 Degree = 2.0000 Value = -0.0712
## elapsed = 0.09 Round = 50 logCost = -1.8423 logGamma = -1.8271 Degree = 3.0000 Value = -0.4847
## elapsed = 2.29 Round = 51 logCost = 19.5982 logGamma = -4.0282 Degree = 2.0000 Value = -0.0712
## elapsed = 9.32 Round = 52 logCost = -1.5708 logGamma = 3.7632 Degree = 4.0000 Value = -0.0847
## elapsed = 0.06 Round = 53 logCost = 14.5566 logGamma = -9.0000 Degree = 1.0000 Value = -0.0508
## elapsed = 10.15 Round = 54 logCost = 2.7306 logGamma = 2.4157 Degree = 5.0000 Value = -0.1254
## elapsed = 0.13 Round = 55 logCost = 3.7923 logGamma = 5.0000 Degree = 1.0000 Value = -0.0475
## elapsed = 9.20 Round = 56 logCost = 11.5543 logGamma = 3.6758 Degree = 1.0000 Value = -0.0492
## elapsed = 15.82 Round = 57 logCost = 20.0000 logGamma = 5.0000 Degree = 1.0000 Value = -0.0729
## elapsed = 0.05 Round = 58 logCost = -5.0000 logGamma = 1.9663 Degree = 3.0000 Value = -0.0763
## elapsed = 8.60 Round = 59 logCost = 20.0000 logGamma = -1.4604 Degree = 5.0000 Value = -0.1186
## elapsed = 0.08 Round = 60 logCost = 15.9307 logGamma = -3.7015 Degree = 3.0000 Value = -0.0559
## elapsed = 6.01 Round = 61 logCost = 20.0000 logGamma = -5.9166 Degree = 1.0000 Value = -0.0475
## elapsed = 12.58 Round = 62 logCost = 20.0000 logGamma = -2.0793 Degree = 1.0000 Value = -0.1475
## elapsed = 0.04 Round = 63 logCost = 4.4065 logGamma = -1.0518 Degree = 1.0000 Value = -0.0475
## elapsed = 0.29 Round = 64 logCost = -5.0000 logGamma = 2.0546 Degree = 5.0000 Value = -0.0746
## elapsed = 11.23 Round = 65 logCost = 20.0000 logGamma = 5.0000 Degree = 3.0000 Value = -0.1186
## elapsed = 0.09 Round = 66 logCost = -5.0000 logGamma = 2.9575 Degree = 1.0000 Value = -0.2169
## elapsed = 15.91 Round = 67 logCost = 15.4134 logGamma = 5.0000 Degree = 2.0000 Value = -0.1559
## elapsed = 10.80 Round = 68 logCost = 3.3240 logGamma = 5.0000 Degree = 5.0000 Value = -0.1322
## elapsed = 2.31 Round = 69 logCost = -5.0000 logGamma = 5.0000 Degree = 3.0000 Value = -0.0576
## elapsed = 10.43 Round = 70 logCost = -5.0000 logGamma = 5.0000 Degree = 5.0000 Value = -0.1068
##
## Best Parameters Found:
## Round = 55 logCost = 3.7923 logGamma = 5.0000 Degree = 1.0000 Value = -0.0475
OPT_Res$Best_Par
## logCost logGamma Degree
## 3.79227 5.00000 1.00000
as.numeric(exp(OPT_Res$Best_Par["logCost"]))
## [1] 44.35696
as.numeric(exp(OPT_Res$Best_Par["logGamma"]))
## [1] 148.4132
# Re-run the 5-fold CV at the optimal polynomial hyperparameters
CV.error <- NULL
for (i in 1:5) {
  valid.data <- subset(train.set, fold == i)
  train.data <- subset(train.set, fold != i)
  svmfit <- svm(Group ~ Longitude + Latitude + OverallAvg, data = train.data,
                kernel = "polynomial",
                cost   = exp(OPT_Res$Best_Par["logCost"]),
                gamma  = exp(OPT_Res$Best_Par["logGamma"]),
                degree = OPT_Res$Best_Par["Degree"])
  svm.y     <- valid.data$Group
  svm.predy <- predict(svmfit, valid.data)
  ith.test.error <- mean(svm.y != svm.predy)
  CV.error <- c(CV.error, (nrow(valid.data) / nrow(train.set)) * ith.test.error)
}
sum(CV.error)
## [1] 0.04745763
# Radial (RBF) kernel: tune cost and gamma (both searched on the log scale)
svm_fit_bayes <- function(logCost, logGamma) {
  CV.error <- NULL
  for (i in 1:5) {
    valid.data <- subset(train.set, fold == i)
    train.data <- subset(train.set, fold != i)
    svmfit <- svm(Group ~ Longitude + Latitude + OverallAvg, data = train.data,
                  kernel = "radial", cost = exp(logCost), gamma = exp(logGamma))
    svm.y     <- valid.data$Group
    svm.predy <- predict(svmfit, valid.data)
    ith.test.error <- mean(svm.y != svm.predy)
    CV.error <- c(CV.error, (nrow(valid.data) / nrow(train.set)) * ith.test.error)
  }
  list(Score = -sum(CV.error), pred = 0)
}
set.seed(234)
OPT_Res <- BayesianOptimization(svm_fit_bayes,
                                bounds = list(logCost = c(-5, 20),
                                              logGamma = c(-9, 5)),
                                init_grid_dt = NULL, init_points = 50,
                                n_iter = 20, acq = "ucb", kappa = 2.576,
                                eps = 0, verbose = TRUE)
## elapsed = 0.15 Round = 1 logCost = 13.6405 logGamma = -0.1100 Value = -0.0712
## elapsed = 0.36 Round = 2 logCost = 14.5428 logGamma = -3.6803 Value = -0.0475
## elapsed = 0.37 Round = 3 logCost = -4.4991 logGamma = -6.8452 Value = -0.6678
## elapsed = 0.18 Round = 4 logCost = 14.4021 logGamma = 2.3082 Value = -0.0898
## elapsed = 0.38 Round = 5 logCost = -3.3272 logGamma = -7.8940 Value = -0.6678
## elapsed = 0.11 Round = 6 logCost = 11.1199 logGamma = -6.9799 Value = -0.0441
## elapsed = 0.19 Round = 7 logCost = 18.2346 logGamma = -7.7917 Value = -0.0542
## elapsed = 0.18 Round = 8 logCost = 12.9411 logGamma = -2.5374 Value = -0.0525
## elapsed = 0.17 Round = 9 logCost = 18.1934 logGamma = 2.3218 Value = -0.0898
## elapsed = 0.12 Round = 10 logCost = 2.1058 logGamma = 0.5545 Value = -0.0712
## elapsed = 0.10 Round = 11 logCost = 8.8931 logGamma = -3.8455 Value = -0.0441
## elapsed = 0.08 Round = 12 logCost = 8.6925 logGamma = -2.1492 Value = -0.0458
## elapsed = 0.11 Round = 13 logCost = 9.5712 logGamma = -5.4319 Value = -0.0373
## elapsed = 0.22 Round = 14 logCost = 9.5747 logGamma = 3.1058 Value = -0.0966
## elapsed = 0.36 Round = 15 logCost = -4.9700 logGamma = -2.5619 Value = -0.6678
## elapsed = 0.12 Round = 16 logCost = 6.0279 logGamma = -3.4814 Value = -0.0525
## elapsed = 0.14 Round = 17 logCost = 2.8288 logGamma = 1.6296 Value = -0.0678
## elapsed = 0.49 Round = 18 logCost = 13.5004 logGamma = -1.9421 Value = -0.0644
## elapsed = 0.25 Round = 19 logCost = -1.5418 logGamma = 0.3706 Value = -0.1339
## elapsed = 0.43 Round = 20 logCost = 16.7944 logGamma = -0.5696 Value = -0.0678
## elapsed = 0.30 Round = 21 logCost = 8.0767 logGamma = 4.0595 Value = -0.1220
## elapsed = 0.10 Round = 22 logCost = 9.4776 logGamma = -4.1480 Value = -0.0458
## elapsed = 0.43 Round = 23 logCost = 16.6300 logGamma = -0.8016 Value = -0.0610
## elapsed = 0.10 Round = 24 logCost = 10.4356 logGamma = -4.6248 Value = -0.0475
## elapsed = 0.22 Round = 25 logCost = 7.2445 logGamma = -8.0562 Value = -0.0915
## elapsed = 0.16 Round = 26 logCost = 4.3700 logGamma = -3.6790 Value = -0.0712
## elapsed = 0.18 Round = 27 logCost = 12.4167 logGamma = -1.3037 Value = -0.0593
## elapsed = 0.35 Round = 28 logCost = -0.2072 logGamma = -3.5495 Value = -0.4407
## elapsed = 0.20 Round = 29 logCost = 15.9732 logGamma = -6.9054 Value = -0.0525
## elapsed = 0.22 Round = 30 logCost = 17.0627 logGamma = -7.2907 Value = -0.0576
## elapsed = 0.11 Round = 31 logCost = 10.4807 logGamma = -0.7788 Value = -0.0661
## elapsed = 0.17 Round = 32 logCost = 1.3078 logGamma = -1.4961 Value = -0.0881
## elapsed = 0.38 Round = 33 logCost = -0.5958 logGamma = -4.3707 Value = -0.5576
## elapsed = 0.20 Round = 34 logCost = 12.6207 logGamma = 2.8276 Value = -0.1000
## elapsed = 0.30 Round = 35 logCost = 8.3371 logGamma = 4.0253 Value = -0.1169
## elapsed = 0.12 Round = 36 logCost = 12.2033 logGamma = 0.0287 Value = -0.0729
## elapsed = 0.11 Round = 37 logCost = 12.5207 logGamma = -4.9540 Value = -0.0542
## elapsed = 0.31 Round = 38 logCost = -1.1632 logGamma = 4.3850 Value = -0.4254
## elapsed = 0.11 Round = 39 logCost = 7.6583 logGamma = -4.3597 Value = -0.0492
## elapsed = 0.19 Round = 40 logCost = 3.8680 logGamma = 2.5258 Value = -0.0780
## elapsed = 0.22 Round = 41 logCost = 9.5338 logGamma = 2.9501 Value = -0.0983
## elapsed = 0.21 Round = 42 logCost = 17.6858 logGamma = 2.9679 Value = -0.1017
## elapsed = 0.17 Round = 43 logCost = 16.1313 logGamma = -6.8699 Value = -0.0559
## elapsed = 0.38 Round = 44 logCost = 0.8071 logGamma = -6.9476 Value = -0.5593
## elapsed = 0.14 Round = 45 logCost = 11.4682 logGamma = -1.7771 Value = -0.0576
## elapsed = 0.31 Round = 46 logCost = 14.4283 logGamma = 4.9103 Value = -0.2119
## elapsed = 0.32 Round = 47 logCost = 1.0083 logGamma = -3.7983 Value = -0.2136
## elapsed = 0.11 Round = 48 logCost = 10.7092 logGamma = -3.7880 Value = -0.0492
## elapsed = 0.12 Round = 49 logCost = 9.8284 logGamma = 0.6458 Value = -0.0729
## elapsed = 0.30 Round = 50 logCost = -1.8423 logGamma = -1.8271 Value = -0.4475
## elapsed = 4.04 Round = 51 logCost = 20.0000 logGamma = -3.2833 Value = -0.0576
## elapsed = 0.09 Round = 52 logCost = 13.7862 logGamma = -9.0000 Value = -0.0525
## elapsed = 0.28 Round = 53 logCost = 20.0000 logGamma = 5.0000 Value = -0.2203
## elapsed = 0.17 Round = 54 logCost = 9.5153 logGamma = -9.0000 Value = -0.0627
## elapsed = 0.09 Round = 55 logCost = 5.0990 logGamma = -0.2921 Value = -0.0542
## elapsed = 0.12 Round = 56 logCost = 17.3790 logGamma = -9.0000 Value = -0.0492
## elapsed = 0.27 Round = 57 logCost = -5.0000 logGamma = 1.8444 Value = -0.6678
## elapsed = 0.30 Round = 58 logCost = 3.0472 logGamma = 5.0000 Value = -0.2203
## elapsed = 2.57 Round = 59 logCost = 17.9054 logGamma = -3.3314 Value = -0.0525
## elapsed = 1.50 Round = 60 logCost = 20.0000 logGamma = -0.9956 Value = -0.0627
## elapsed = 0.51 Round = 61 logCost = 20.0000 logGamma = -6.3013 Value = -0.0593
## elapsed = 0.15 Round = 62 logCost = 3.4012 logGamma = -2.1806 Value = -0.0627
## elapsed = 0.11 Round = 63 logCost = 6.9849 logGamma = 0.5412 Value = -0.0644
## elapsed = 0.14 Round = 64 logCost = 20.0000 logGamma = -9.0000 Value = -0.0475
## elapsed = 0.16 Round = 65 logCost = 20.0000 logGamma = 2.3950 Value = -0.0932
## elapsed = 0.28 Round = 66 logCost = 11.2449 logGamma = 5.0000 Value = -0.2203
## elapsed = 0.09 Round = 67 logCost = 6.6428 logGamma = -1.3861 Value = -0.0508
## elapsed = 3.12 Round = 68 logCost = 18.4032 logGamma = -1.8376 Value = -0.0644
## elapsed = 0.09 Round = 69 logCost = 15.4659 logGamma = -9.0000 Value = -0.0508
## elapsed = 0.13 Round = 70 logCost = 9.1166 logGamma = -7.6082 Value = -0.0542
##
## Best Parameters Found:
## Round = 13 logCost = 9.5712 logGamma = -5.4319 Value = -0.0373
OPT_Res$Best_Par
## logCost logGamma
## 9.571196 -5.431880
as.numeric(exp(OPT_Res$Best_Par["logCost"]))
## [1] 14345.57
as.numeric(exp(OPT_Res$Best_Par["logGamma"]))
## [1] 0.004374861
# Re-run the 5-fold CV at the optimal radial hyperparameters
CV.error <- NULL
for (i in 1:5) {
  valid.data <- subset(train.set, fold == i)
  train.data <- subset(train.set, fold != i)
  svmfit <- svm(Group ~ Longitude + Latitude + OverallAvg, data = train.data,
                kernel = "radial",
                cost  = exp(OPT_Res$Best_Par["logCost"]),
                gamma = exp(OPT_Res$Best_Par["logGamma"]))
  svm.y     <- valid.data$Group
  svm.predy <- predict(svmfit, valid.data)
  ith.test.error <- mean(svm.y != svm.predy)
  CV.error <- c(CV.error, (nrow(valid.data) / nrow(train.set)) * ith.test.error)
}
sum(CV.error)
## [1] 0.03728814
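Of the three kernels, the radial one achieves the lowest weighted CV error (about 0.037, versus roughly 0.047 for the polynomial and 0.056 for the linear kernel). A natural follow-up, sketched below but not run as part of the original analysis, is to refit the radial SVM on the whole training set with the tuned hyperparameters and score it on the held-out test set:

# Hedged sketch: final radial SVM fitted to the full training set, checked on test.set
final.svm <- svm(Group ~ Longitude + Latitude + OverallAvg, data = train.set,
                 kernel = "radial",
                 cost  = exp(OPT_Res$Best_Par["logCost"]),
                 gamma = exp(OPT_Res$Best_Par["logGamma"]))
test.pred <- predict(final.svm, test.set)
mean(test.pred != test.set$Group)   # held-out misclassification rate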