setwd("~/Google Drive/UM/Smart Services/Thesis/Thesis/Code/Naive Bayes/2.Feature Set 1/TP/Full")
#install.packages("naivebayes")
library(dplyr)
##
## Attaching package: 'dplyr'
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
library(psych)
library(ggplot2)
##
## Attaching package: 'ggplot2'
## The following objects are masked from 'package:psych':
##
## %+%, alpha
library(e1071)
library(readxl)
library(caret)
## Loading required package: lattice
#Import Labels
Labels <- read_excel("~/Google Drive/UM/Smart Services/Thesis/Thesis/Code/Naive Bayes/1.Labels/Source Data.xlsx")
Label <- Labels$Score
#Import Features (binary term-occurrence matrix) and drop the first, non-feature column
Features <- read.csv("~/Google Drive/UM/Smart Services/Thesis/Thesis/Code/Naive Bayes/2.Feature Set 1/TP/Full/Feature Set 1 Full TP.csv")
Features <- Features[-1]
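#Sanity check (sketch, not part of the original pipeline): the feature matrix
#and the label vector should describe the same 1,000 reviews
stopifnot(nrow(Features) == length(Label))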
#Class 2: one-vs-rest label for ratings 3 and 4
Label2 <- list()
for (i in 1:1000) {
  if (Label[i] == 3 | Label[i] == 4) {
    Label2[i] <- 1
  } else {
    Label2[i] <- 0
  }
}
#As Factor
Label2 <- as.factor(unlist(Label2))
#Class 3: one-vs-rest label for ratings 5 and 6
Label3 <- list()
for (i in 1:1000) {
  if (Label[i] == 5 | Label[i] == 6) {
    Label3[i] <- 1
  } else {
    Label3[i] <- 0
  }
}
#As Factor
Label3 <- as.factor(unlist(Label3))
#Class 4: one-vs-rest label for ratings 7 and 8
Label4 <- list()
for (i in 1:1000) {
  if (Label[i] == 7 | Label[i] == 8) {
    Label4[i] <- 1
  } else {
    Label4[i] <- 0
  }
}
#As Factor
Label4 <- as.factor(unlist(Label4))
#Class 5: one-vs-rest label for ratings 9 and 10
Label5 <- list()
for (i in 1:1000) {
  if (Label[i] == 9 | Label[i] == 10) {
    Label5[i] <- 1
  } else {
    Label5[i] <- 0
  }
}
#As Factor
Label5 <- as.factor(unlist(Label5))
#All Labels: collapse ratings into classes (9-10 -> 5, 7-8 -> 4, 5-6 -> 3, everything else -> 2)
All <- list()
for (i in 1:1000) {
  if (Label[i] == 9 | Label[i] == 10) {
    All[i] <- 5
  } else if (Label[i] == 7 | Label[i] == 8) {
    All[i] <- 4
  } else if (Label[i] == 5 | Label[i] == 6) {
    All[i] <- 3
  } else {
    All[i] <- 2
  }
}
#As Factor
All <- as.factor(unlist(All))
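#The four loops above can also be written in vectorised form; a minimal
#equivalent sketch (the *.vec names are illustrative only - the pipeline
#below keeps using the loop-built objects):
Label2.vec <- as.factor(as.integer(Label %in% c(3, 4)))
Label3.vec <- as.factor(as.integer(Label %in% c(5, 6)))
Label4.vec <- as.factor(as.integer(Label %in% c(7, 8)))
Label5.vec <- as.factor(as.integer(Label %in% c(9, 10)))
All.vec    <- as.factor(pmax(ceiling(Label / 2), 2))  #ratings 1-4 -> 2, ..., 9-10 -> 5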
#Control data frame: original score next to every derived label, as a sanity check
Control.df <- data.frame(matrix(seq(1, 1000), ncol = 1, nrow = 1000))
Control.df$Actual <- as.factor(Label)
Control.df$All <- All
Control.df$Label2 <- Label2
Control.df$Label3 <- Label3
Control.df$Label4 <- Label4
Control.df$Label5 <- Label5
Control.df[1:10,2:7]
## Actual All Label2 Label3 Label4 Label5
## 1 3 2 1 0 0 0
## 2 8 4 0 0 1 0
## 3 7 4 0 0 1 0
## 4 4 2 1 0 0 0
## 5 7 4 0 0 1 0
## 6 7 4 0 0 1 0
## 7 5 3 0 1 0 0
## 8 10 5 0 0 0 1
## 9 7 4 0 0 1 0
## 10 8 4 0 0 1 0
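#Cross-tabulating the original score against the collapsed class is another
#quick sanity check (sketch, output omitted):
table(Control.df$Actual, Control.df$All)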
#Transform each of the 2,672 binary term columns from integer to factor
for (i in 1:2672) {
  Features[, i] <- as.factor(Features[, i])
}
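#The same conversion can be written without an explicit loop; an equivalent
#sketch (as.factor applied to an existing factor leaves it unchanged):
Features[] <- lapply(Features, as.factor)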
str(Features)
## 'data.frame': 1000 obs. of 2672 variables:
## $ abil : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ abit : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ abl : Factor w/ 2 levels "0","1": 2 1 1 1 1 1 1 1 1 1 ...
## $ abnorm : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ about : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ abov : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ abrupt : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ absolut : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ accent : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ accept : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ access : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ accid : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ accommod : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ accomplish : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ accur : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ accustom : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ acess : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ ach : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ acknowledg : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ acomod : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ across : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ activ : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ actual : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 2 1 ...
## $ adaptor : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ add : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ addit : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ adequ : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ adjac : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ adjust : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ ador : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ adult : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ advanc : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ advantag : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ adverti : Factor w/ 2 levels "0","1": 1 1 1 1 2 1 1 1 1 1 ...
## $ advi : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ advic : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ affair : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ affect : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ afford : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ afraid : Factor w/ 2 levels "0","1": 1 1 1 2 1 1 1 1 1 1 ...
## $ africa : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ after : Factor w/ 2 levels "0","1": 1 1 1 1 2 1 1 1 1 1 ...
## $ afterdinn : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ afternoon : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ afterward : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ age : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ ago : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ agr : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ agreeabl : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ ahead : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ air : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ aircon : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ aircondit : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 2 ...
## $ airi : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ airless : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ airport : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ alarm : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ albeit : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ albert : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ albrt : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ alcohol : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ aldo : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ alittl : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ all : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ allevi : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ alloc : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ allow : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ almost : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ along : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ alongsid : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ alot : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ alreadi : Factor w/ 2 levels "0","1": 1 1 1 2 1 1 1 1 1 1 ...
## $ alright : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ also : Factor w/ 2 levels "0","1": 1 1 1 2 1 1 2 1 2 1 ...
## $ altern : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ although : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ alway : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ amaz : Factor w/ 2 levels "0","1": 1 1 1 1 2 1 1 1 1 1 ...
## $ ambianc : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ ambienc : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ amen : Factor w/ 2 levels "0","1": 1 2 1 1 1 1 1 1 1 1 ...
## $ amend : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ america : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ american : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ amongst : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ amount : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ ampl : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ amsterdam : Factor w/ 2 levels "0","1": 1 2 1 2 1 1 1 1 1 1 ...
## $ and : Factor w/ 2 levels "0","1": 2 1 1 1 1 1 1 1 1 1 ...
## $ angl : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ angri : Factor w/ 2 levels "0","1": 2 1 1 2 1 1 1 1 1 1 ...
## $ ann : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ anna : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ annex : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ announc : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ annoy : Factor w/ 2 levels "0","1": 2 1 1 1 1 1 1 1 1 1 ...
## $ anoth : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ ansterdam : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## $ answer : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
## [list output truncated]
#Train/test split: ind == 1 -> training (~80%), ind == 2 -> test (~20%)
set.seed(1234)
ind <- sample(2, nrow(Features), replace = TRUE, prob = c(0.8, 0.2))
train <- Features[ind == 1, ]
test  <- Features[ind == 2, ]
train.labels.2 <- Label2[ind == 1]
test.labels.2  <- Label2[ind == 2]
train.labels.3 <- Label3[ind == 1]
test.labels.3  <- Label3[ind == 2]
train.labels.4 <- Label4[ind == 1]
test.labels.4  <- Label4[ind == 2]
train.labels.5 <- Label5[ind == 1]
test.labels.5  <- Label5[ind == 2]
train.labels <- All[ind == 1]
test.labels  <- All[ind == 2]
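#Note that sample() only gives an approximate 80/20 split. caret (loaded above)
#offers a stratified alternative; sketch only - the idx.strat/train.strat/test.strat
#names are illustrative and the results below use the sample()-based split:
idx.strat   <- createDataPartition(All, p = 0.8, list = FALSE)
train.strat <- Features[idx.strat, ]
test.strat  <- Features[-idx.strat, ]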
#Quick check that the split label vectors line up (first rows of the 792 training labels)
head(data.frame(train.labels, train.labels.2, train.labels.3, train.labels.4, train.labels.5))
## train.labels train.labels.2 train.labels.3 train.labels.4 train.labels.5
## 1 2 1 0 0 0
## 2 4 0 0 1 0
## 3 4 0 0 1 0
## 4 2 1 0 0 0
## 5 4 0 0 1 0
## 6 3 0 1 0 0
#Class distribution over all 1,000 reviews (strongly skewed towards classes 4 and 5)
table(All)
## All
## 2 3 4 5
## 34 112 309 545
#Train one binary (one-vs-rest) Naive Bayes model per class
NB2 <- naiveBayes(x = train, y = train.labels.2)
NB3 <- naiveBayes(x = train, y = train.labels.3)
NB4 <- naiveBayes(x = train, y = train.labels.4)
NB5 <- naiveBayes(x = train, y = train.labels.5)
#Posterior probabilities on the test set (type = "raw" returns P(0) and P(1) per review)
NB.Pred2 <- predict(NB2, test, type = "raw")
NB.Pred3 <- predict(NB3, test, type = "raw")
NB.Pred4 <- predict(NB4, test, type = "raw")
NB.Pred5 <- predict(NB5, test, type = "raw")
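#For comparison, a single multiclass Naive Bayes model could be fitted directly
#on the 4-class label instead of four binary models (sketch; NB.All and NB.Pred.All
#are illustrative names, the reported results use the voting scheme below):
NB.All      <- naiveBayes(x = train, y = train.labels)
NB.Pred.All <- predict(NB.All, test)  #predicted classes 2-5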
#Combine the posterior probabilities of the four binary models
Voting.df <- data.frame(NB.Pred2, NB.Pred3, NB.Pred4, NB.Pred5)
colnames(Voting.df) <- c("Class 2: 0", "Class 2: 1", "Class 3: 0", "Class 3: 1", "Class 4: 0", "Class 4: 1", "Class 5: 0", "Class 5: 1")
head(Voting.df)
## Class 2: 0 Class 2: 1 Class 3: 0 Class 3: 1 Class 4: 0 Class 4: 1
## 1 1.000000 5.085547e-17 0.6298040 3.701960e-01 0.000286024 9.997140e-01
## 2 1.000000 5.132245e-17 1.0000000 4.599657e-08 0.999991388 8.612184e-06
## 3 1.000000 3.700019e-19 0.9999990 9.936373e-07 0.998186130 1.813870e-03
## 4 1.000000 2.147626e-12 1.0000000 3.340118e-08 0.999786944 2.130561e-04
## 5 0.999998 2.038750e-06 0.9572649 4.273511e-02 0.924065820 7.593418e-02
## 6 1.000000 2.207549e-14 0.8393824 1.606176e-01 0.990092212 9.907788e-03
## Class 5: 0 Class 5: 1
## 1 1.000000e+00 3.590963e-13
## 2 3.135763e-05 9.999686e-01
## 3 1.690535e-03 9.983095e-01
## 4 9.879116e-01 1.208836e-02
## 5 9.771605e-01 2.283952e-02
## 6 9.790841e-01 2.091591e-02
#Keep only the positive-class column from each model: P(class = k | review) for k = 2,...,5
Transformed.Voting.df <- Voting.df[seq(2, 8, 2)]
colnames(Transformed.Voting.df) <- c("2", "3", "4", "5")
head(Transformed.Voting.df)
## 2 3 4 5
## 1 5.085547e-17 3.701960e-01 9.997140e-01 3.590963e-13
## 2 5.132245e-17 4.599657e-08 8.612184e-06 9.999686e-01
## 3 3.700019e-19 9.936373e-07 1.813870e-03 9.983095e-01
## 4 2.147626e-12 3.340118e-08 2.130561e-04 1.208836e-02
## 5 2.038750e-06 4.273511e-02 7.593418e-02 2.283952e-02
## 6 2.207549e-14 1.606176e-01 9.907788e-03 2.091591e-02
#Vote: each test review gets the class whose one-vs-rest model assigns it the highest probability
Evaluation <- Transformed.Voting.df
Index <- as.numeric(apply(Transformed.Voting.df, MARGIN = 1, which.max))
#which.max returns the column position (1-4); adding 1 maps it back to the class labels 2-5
Index <- Index + 1
Evaluation$Vote <- Index
Evaluation$Actual <- test.labels
head(Evaluation,100)
## 2 3 4 5 Vote Actual
## 1 5.085547e-17 3.701960e-01 9.997140e-01 3.590963e-13 4 4
## 2 5.132245e-17 4.599657e-08 8.612184e-06 9.999686e-01 5 5
## 3 3.700019e-19 9.936373e-07 1.813870e-03 9.983095e-01 5 5
## 4 2.147626e-12 3.340118e-08 2.130561e-04 1.208836e-02 5 4
## 5 2.038750e-06 4.273511e-02 7.593418e-02 2.283952e-02 4 4
## 6 2.207549e-14 1.606176e-01 9.907788e-03 2.091591e-02 3 4
## 7 4.482175e-13 2.370420e-12 6.236825e-05 1.245384e-07 4 5
## 8 7.010302e-15 2.305498e-01 9.995794e-01 1.821500e-12 4 3
## 9 2.666696e-18 6.226118e-10 1.220446e-01 9.574927e-01 5 5
## 10 8.590340e-11 8.176714e-01 5.506424e-06 1.829595e-06 3 3
## 11 3.251670e-20 1.609038e-06 7.436063e-01 4.022727e-01 4 4
## 12 1.654985e-09 4.789040e-04 8.064601e-01 1.324861e-02 4 3
## 13 4.284540e-16 4.708144e-04 2.058908e-03 9.915386e-01 5 5
## 14 1.362932e-07 1.593681e-01 5.024963e-01 1.348096e-06 4 4
## 15 7.337653e-09 9.614237e-01 8.627804e-02 7.833711e-04 3 4
## 16 4.676808e-22 2.868356e-09 1.408753e-01 1.768022e-01 5 5
## 17 5.303401e-16 1.852825e-03 4.265215e-03 1.858882e-01 5 4
## 18 3.852268e-13 1.808042e-07 8.917750e-02 9.934811e-01 5 5
## 19 3.267149e-11 1.970857e-01 6.554830e-01 1.114613e-03 4 4
## 20 4.136127e-12 2.650023e-04 2.222324e-01 7.808297e-01 5 2
## 21 6.024745e-18 5.846514e-04 9.814748e-01 1.836108e-07 4 4
## 22 2.320499e-26 6.617775e-05 6.039513e-01 6.540577e-01 5 4
## 23 3.814672e-05 9.997395e-01 9.591423e-01 5.332633e-14 3 3
## 24 3.789730e-21 9.933866e-07 7.629092e-02 9.997544e-01 5 5
## 25 5.086220e-13 1.211110e-01 2.123943e-01 6.831716e-02 4 4
## 26 1.579914e-13 3.460893e-03 1.345149e-01 7.337430e-01 5 5
## 27 5.664919e-14 3.303206e-08 7.974759e-01 8.140434e-01 5 4
## 28 3.674182e-11 2.664631e-05 4.726364e-03 9.389100e-01 5 5
## 29 1.387636e-01 5.359010e-03 4.580873e-02 3.974114e-09 2 3
## 30 6.277883e-08 3.647405e-01 9.671337e-02 4.384247e-16 3 3
## 31 1.094219e-15 1.561389e-06 1.153268e-03 9.998369e-01 5 5
## 32 1.541516e-15 1.542555e-04 4.474201e-05 9.994096e-01 5 5
## 33 6.268604e-19 2.390352e-05 9.747067e-01 8.376245e-01 4 5
## 34 9.790813e-15 9.882669e-01 3.106137e-01 8.173468e-06 3 3
## 35 1.235895e-13 8.076226e-01 1.816510e-01 4.928246e-03 3 4
## 36 9.031086e-17 7.988430e-01 8.089730e-01 1.021642e-02 4 4
## 37 1.750806e-09 5.202050e-06 9.997740e-01 4.396381e-10 4 5
## 38 9.992761e-01 9.970891e-01 4.176811e-01 1.274321e-09 2 3
## 39 5.150557e-19 6.957228e-07 2.171225e-04 9.997894e-01 5 5
## 40 3.958031e-12 4.853673e-03 9.995448e-01 6.062867e-09 4 3
## 41 8.404244e-17 6.694519e-07 4.159107e-04 9.999791e-01 5 5
## 42 2.692873e-19 8.047690e-01 8.441047e-01 2.180265e-04 4 4
## 43 3.892315e-13 1.792629e-14 6.688933e-01 4.107468e-05 4 5
## 44 2.106748e-12 9.247941e-04 9.164247e-01 1.379230e-02 4 3
## 45 1.670353e-12 1.036277e-02 9.772343e-01 4.470117e-02 4 5
## 46 1.305538e-08 3.928772e-03 8.622768e-01 3.508936e-02 4 4
## 47 9.798676e-14 2.207403e-05 8.905776e-01 8.112612e-01 4 5
## 48 2.039372e-12 2.551856e-01 9.606574e-02 3.601320e-01 5 3
## 49 1.549235e-15 1.339567e-06 6.812237e-03 9.999814e-01 5 5
## 50 2.383341e-10 1.242242e-05 4.351616e-03 9.861694e-01 5 5
## 51 2.635576e-16 8.217595e-06 8.852001e-03 9.999521e-01 5 4
## 52 1.117418e-11 2.944834e-06 9.903020e-01 3.529115e-02 4 5
## 53 6.313133e-15 7.282005e-03 4.413418e-03 9.901109e-01 5 4
## 54 8.894070e-16 1.278438e-06 9.499966e-01 7.447794e-01 4 4
## 55 5.177564e-14 1.076860e-02 1.406200e-01 8.916769e-01 5 4
## 56 1.865504e-15 1.589108e-06 4.092820e-04 9.999943e-01 5 5
## 57 5.122882e-14 1.302572e-05 1.891993e-02 9.998712e-01 5 2
## 58 2.164496e-16 1.092532e-07 1.171357e-02 9.999674e-01 5 5
## 59 6.207655e-17 2.012663e-06 3.730958e-03 9.999892e-01 5 4
## 60 3.703190e-13 2.437926e-01 9.307587e-01 1.639783e-02 4 4
## 61 1.415214e-13 1.901461e-05 1.681163e-02 9.998169e-01 5 5
## 62 1.293811e-14 1.767515e-04 6.445276e-04 9.997690e-01 5 5
## 63 9.310234e-11 6.329144e-03 6.298900e-01 7.757074e-01 5 4
## 64 2.896868e-17 1.968502e-06 3.382364e-01 9.984799e-01 5 5
## 65 6.053089e-18 2.891696e-07 2.210123e-03 9.999923e-01 5 5
## 66 3.309020e-16 1.884755e-09 1.635538e-03 9.999985e-01 5 5
## 67 6.675858e-11 8.610071e-06 7.294747e-02 9.806083e-01 5 3
## 68 8.217342e-12 1.167028e-02 1.310860e-01 9.411885e-01 5 3
## 69 3.026150e-15 3.020766e-04 8.940019e-02 9.791349e-01 5 4
## 70 8.983756e-20 1.430739e-07 1.213226e-01 9.983816e-01 5 4
## 71 4.253927e-15 3.342130e-07 6.278586e-03 9.999808e-01 5 5
## 72 5.791291e-17 1.732431e-07 3.073322e-01 9.983583e-01 5 5
## 73 1.479631e-10 1.816138e-03 1.820248e-01 9.101305e-01 5 2
## 74 3.166153e-10 1.507340e-03 1.128675e-01 9.375731e-01 5 4
## 75 1.904695e-15 1.904132e-06 4.503018e-01 9.964554e-01 5 4
## 76 4.843697e-17 1.278937e-06 4.524668e-01 8.225276e-01 5 5
## 77 2.294928e-17 1.820332e-07 1.430221e-02 1.518888e-02 5 3
## 78 1.115960e-17 4.824486e-05 4.280223e-01 5.877863e-01 5 4
## 79 1.903349e-21 9.560638e-01 8.773338e-01 1.574645e-09 3 5
## 80 3.855061e-16 1.048221e-09 1.132342e-03 9.999966e-01 5 5
## 81 8.883978e-20 1.589786e-07 1.134751e-01 9.955866e-01 5 5
## 82 6.441821e-20 3.352122e-06 5.545280e-02 9.997791e-01 5 4
## 83 5.528756e-21 2.233603e-10 1.848684e-01 6.007386e-07 4 5
## 84 1.937846e-14 4.552182e-06 1.285113e-03 9.987333e-01 5 5
## 85 1.315913e-16 4.164742e-04 1.748855e-01 8.887877e-01 5 5
## 86 1.069401e-17 2.003729e-05 6.124875e-03 9.186108e-05 4 3
## 87 2.600914e-03 8.057695e-04 9.540313e-01 3.636497e-09 4 4
## 88 2.777176e-15 6.089107e-05 3.118155e-03 5.353485e-02 5 5
## 89 1.596456e-23 2.389506e-07 1.993851e-02 9.999079e-01 5 5
## 90 7.658002e-15 6.973426e-06 7.934139e-01 1.453197e-02 4 3
## 91 1.299444e-16 1.912535e-01 7.684274e-02 9.110384e-01 5 3
## 92 2.630457e-17 1.541552e-02 2.788443e-01 2.168203e-01 4 5
## 93 6.329448e-21 7.566772e-04 9.955079e-01 6.995435e-04 4 4
## 94 2.410715e-17 2.322570e-12 1.189808e-02 9.204965e-05 4 5
## 95 1.195888e-12 1.073497e-04 3.422524e-04 9.867647e-01 5 5
## 96 3.491141e-25 2.754688e-07 3.699775e-01 9.978946e-01 5 5
## 97 1.046920e-20 4.734983e-05 5.089225e-02 9.952704e-01 5 5
## 98 3.780183e-14 8.511085e-06 3.782028e-03 9.809567e-01 5 5
## 99 6.130152e-05 3.122695e-03 1.535329e-03 1.779499e-01 5 2
## 100 4.924314e-15 1.405987e-02 5.583749e-01 4.365679e-01 4 5
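#The row-wise which.max used for the Vote column can also be expressed with
#max.col; equivalent sketch (Vote2 is an illustrative name and is not used further):
Evaluation$Vote2 <- max.col(as.matrix(Transformed.Voting.df), ties.method = "first") + 1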
#Confusion matrix: rows = actual class, columns = predicted (voted) class
CM <- table(Evaluation$Actual, Evaluation$Vote)
CM
##
## 2 3 4 5
## 2 0 0 1 6
## 3 2 6 8 9
## 4 0 5 24 33
## 5 0 2 17 95
#Class sizes in the test set (used as weights for the averaged metrics below)
Overall <- length(Evaluation$Actual)
Length2 <- length(which(Evaluation$Actual==2))
Length3 <- length(which(Evaluation$Actual==3))
Length4 <- length(which(Evaluation$Actual==4))
Length5 <- length(which(Evaluation$Actual==5))
#Accuracy
Accuracy <- sum(diag(CM))/sum(CM)
#Precision: per class = correct predictions / all predictions of that class (column sums), weighted by class size
Precision <- diag(CM)/colSums(CM)
Precision <- (Precision[1]*Length2+Precision[2]*Length3+Precision[3]*Length4+Precision[4]*Length5)/Overall
#Recall: per class = correct predictions / all actual members of that class (row sums), weighted by class size
#(weighting recall by the actual class sizes makes it identical to overall accuracy)
Recall <- diag(CM)/rowSums(CM)
Recall <- (Recall[1]*Length2+Recall[2]*Length3+Recall[3]*Length4+Recall[4]*Length5)/Overall
Accuracy
## [1] 0.6009615
Precision
## 2
## 0.5626573
Recall
## 2
## 0.6009615
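#The same quantities can be cross-checked with caret's confusionMatrix()
#(sketch; cm.check is an illustrative name, 'Sensitivity' is per-class recall
#and 'Pos Pred Value' is per-class precision, each computed one-vs-all):
cm.check <- confusionMatrix(factor(Evaluation$Vote, levels = levels(Evaluation$Actual)),
                            Evaluation$Actual)
cm.check$overall["Accuracy"]
cm.check$byClass[, c("Sensitivity", "Pos Pred Value")]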