setwd("~/Google Drive/UM/Smart Services/Thesis/Thesis/Code/Naive Bayes/2.Feature Set 1/TFIDF/10")
#install.packages("naivebayes")
library(naivebayes)
library(dplyr)
##
## Attaching package: 'dplyr'
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
library(psych)
library(ggplot2)
##
## Attaching package: 'ggplot2'
## The following objects are masked from 'package:psych':
##
## %+%, alpha
library(e1071)
library(readxl)
#Import Labels
Labels <- read_excel("~/Google Drive/UM/Smart Services/Thesis/Thesis/Code/Naive Bayes/1.Labels/Source Data.xlsx")
Label <- Labels$Score
#Import Features
Features <- read.csv("~/Google Drive/UM/Smart Services/Thesis/Thesis/Code/Naive Bayes/2.Feature Set 1/TFIDF/10/Feature Set 1: 10th TFIDF.csv")
#Drop the first column (an index, not a feature)
Features <- Features[-1]
#Binary one-vs-rest labels: the 10-point score is binned in pairs
#Class 2 (scores 3-4)
Label2 <- as.factor(as.integer(Label == 3 | Label == 4))
#Class 3 (scores 5-6)
Label3 <- as.factor(as.integer(Label == 5 | Label == 6))
#Class 4 (scores 7-8)
Label4 <- as.factor(as.integer(Label == 7 | Label == 8))
#Class 5 (scores 9-10)
Label5 <- as.factor(as.integer(Label == 9 | Label == 10))
#All labels: collapse the score into the four classes 2-5
All <- as.factor(ifelse(Label == 9 | Label == 10, 5,
             ifelse(Label == 7 | Label == 8, 4,
             ifelse(Label == 5 | Label == 6, 3, 2))))
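#Sanity check (output not shown): cross-tabulating the raw score against the
#collapsed class should map each score pair to exactly one class.
table(Label, All)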
#Control data frame to verify the recoding against the original scores
Control.df <- data.frame(ID = seq_len(1000))
Control.df$Actual <- Label
Control.df$All <- All
Control.df$Label2 <- Label2
Control.df$Label3 <- Label3
Control.df$Label4 <- Label4
Control.df$Label5 <- Label5
Control.df[1:10,2:7]
## Actual All Label2 Label3 Label4 Label5
## 1 3 2 1 0 0 0
## 2 8 4 0 0 1 0
## 3 7 4 0 0 1 0
## 4 4 2 1 0 0 0
## 5 7 4 0 0 1 0
## 6 7 4 0 0 1 0
## 7 5 3 0 1 0 0
## 8 10 5 0 0 0 1
## 9 7 4 0 0 1 0
## 10 8 4 0 0 1 0
#Convert every TF-IDF column from numeric to factor, so naiveBayes builds a
#frequency table per distinct weight instead of fitting a Gaussian
Features[] <- lapply(Features, as.factor)
str(Features, list.len = 10)
## 'data.frame': 1000 obs. of 2396 variables:
## $ abit : Factor w/ 3 levels "0","0.199239650770269",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ abl : Factor w/ 13 levels "0","0.0350594603513238",..: 2 1 1 1 1 1 1 1 1 1 ...
## $ about : Factor w/ 2 levels "0","0.71184173461872": 1 1 1 1 1 1 1 1 1 1 ...
## $ abov : Factor w/ 2 levels "0","0.398631371386483": 1 1 1 1 1 1 1 1 1 1 ...
## $ absolut : Factor w/ 10 levels "0","0.0363483389163396",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ accent : Factor w/ 3 levels "0","0.373574345194254",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ accept : Factor w/ 4 levels "0","0.133028917205412",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ access : Factor w/ 29 levels "0","0.0374430565263987",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ accid : Factor w/ 2 levels "0","0.293111302490061": 1 1 1 1 1 1 1 1 1 1 ...
## $ accommod : Factor w/ 15 levels "0","0.0542344025878372",..: 1 1 1 1 1 1 1 1 1 1 ...
## [list output truncated]
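#Alternative sketch (not evaluated in this run): keeping the TF-IDF weights
#numeric would make naiveBayes fit a Gaussian per feature and class instead
#of a frequency table per distinct weight. Features.num and NB.gauss are
#hypothetical names; the CSV is the same file imported above.
Features.num <- read.csv("Feature Set 1: 10th TFIDF.csv")[-1]
NB.gauss <- naiveBayes(x = Features.num, y = All)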
#Train/test split: 80/20 by random assignment
set.seed(1234)
ind <- sample(2, nrow(Features), replace = TRUE, prob = c(0.8, 0.2))
train <- Features[ind == 1,]
test <- Features[ind == 2,]
train.labels.2 <- Label2[ind == 1]
test.labels.2 <- Label2[ind == 2]
train.labels.3 <- Label3[ind == 1]
test.labels.3 <- Label3[ind == 2]
train.labels.4 <- Label4[ind == 1]
test.labels.4 <- Label4[ind == 2]
train.labels.5 <- Label5[ind == 1]
test.labels.5 <- Label5[ind == 2]
train.labels <- All[ind == 1]
test.labels <- All[ind == 2]
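#Note that sample() above draws an unstratified split; with only 34 of the
#1000 reviews in class 2, the per-class proportions can drift between train
#and test. A stratified sketch (hypothetical names train.strat/test.strat),
#drawing 80% within each class of All:
strat <- unlist(lapply(levels(All), function(cl) {
  idx <- which(All == cl)
  sample(idx, round(0.8 * length(idx)))
}))
train.strat <- Features[strat,]
test.strat <- Features[-strat,]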
#Check the recoded labels on the training split (first 10 rows shown)
head(data.frame(train.labels, train.labels.2, train.labels.3, train.labels.4, train.labels.5), 10)
## train.labels train.labels.2 train.labels.3 train.labels.4
## 1 2 1 0 0
## 2 4 0 0 1
## 3 4 0 0 1
## 4 2 1 0 0
## 5 4 0 0 1
## 6 3 0 1 0
## 7 5 0 0 0
## 8 4 0 0 1
## 9 4 0 0 1
## 10 5 0 0 0
## train.labels.5
## 1 0
## 2 0
## 3 0
## 4 0
## 5 0
## 6 0
## 7 1
## 8 0
## 9 0
## 10 1
#Class distribution over all 1000 reviews; note the skew toward class 5
table(All)
## All
## 2 3 4 5
## 34 112 309 545
#Fit one binary naive Bayes model per class (one-vs-rest)
NB2 <- naiveBayes(x = train, y = train.labels.2)
NB3 <- naiveBayes(x = train, y = train.labels.3)
NB4 <- naiveBayes(x = train, y = train.labels.4)
NB5 <- naiveBayes(x = train, y = train.labels.5)
#Raw posterior probabilities on the test set
NB.Pred2 <- predict(NB2, test, type = "raw")
NB.Pred3 <- predict(NB3, test, type = "raw")
NB.Pred4 <- predict(NB4, test, type = "raw")
NB.Pred5 <- predict(NB5, test, type = "raw")
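#For comparison (sketch, output not shown): a single multiclass naive Bayes
#on the same split; NB.All and NB.Pred.All are hypothetical names.
NB.All <- naiveBayes(x = train, y = train.labels)
NB.Pred.All <- predict(NB.All, test)
#table(test.labels, NB.Pred.All) would give the corresponding confusion matrix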
#Collect the eight posterior columns: P(0) and P(1) for each binary model
Voting.df <- data.frame(NB.Pred2, NB.Pred3, NB.Pred4, NB.Pred5)
colnames(Voting.df) <- c("Class 2: 0","Class 2: 1","Class 3: 0","Class 3: 1","Class 4: 0","Class 4: 1","Class 5: 0","Class 5: 1")
head(Voting.df)
## Class 2: 0 Class 2: 1 Class 3: 0 Class 3: 1 Class 4: 0 Class 4: 1
## 1 1 8.676202e-12 0.9999061 9.391955e-05 0.0009227284 0.999077272
## 2 1 1.203093e-09 0.9994303 5.697425e-04 0.9666246326 0.033375367
## 3 1 1.313707e-11 0.9963059 3.694125e-03 0.9887541607 0.011245839
## 4 1 3.869451e-12 0.2152280 7.847720e-01 0.5646777162 0.435322284
## 5 1 6.351230e-10 0.9999985 1.471869e-06 0.9945943335 0.005405667
## 6 1 5.117065e-14 0.9999996 3.921571e-07 0.9981965049 0.001803495
## Class 5: 0 Class 5: 1
## 1 6.904764e-01 0.3095236
## 2 9.071920e-02 0.9092808
## 3 9.938362e-04 0.9990062
## 4 7.296700e-01 0.2703300
## 5 2.276723e-04 0.9997723
## 6 1.127997e-06 0.9999989
#Keep only the P(class = 1) column of each binary model (columns 2, 4, 6, 8)
Transformed.Voting.df <- Voting.df[seq(2, 8, 2)]
colnames(Transformed.Voting.df) <- c("2","3","4","5")
head(Transformed.Voting.df)
## 2 3 4 5
## 1 8.676202e-12 9.391955e-05 0.999077272 0.3095236
## 2 1.203093e-09 5.697425e-04 0.033375367 0.9092808
## 3 1.313707e-11 3.694125e-03 0.011245839 0.9990062
## 4 3.869451e-12 7.847720e-01 0.435322284 0.2703300
## 5 6.351230e-10 1.471869e-06 0.005405667 0.9997723
## 6 5.117065e-14 3.921571e-07 0.001803495 0.9999989
Evaluation <- Transformed.Voting.df
#The four posteriors come from independent binary models, so a row need not
#sum to 1; the vote simply takes the most confident model. which.max returns
#a column index 1-4, so add 1 to map it back to the class labels 2-5.
Index <- as.numeric(apply(Transformed.Voting.df, MARGIN = 1, which.max))
Index <- Index + 1
Evaluation$Vote <- Index
Evaluation$Actual <- test.labels
head(Evaluation,100)
## 2 3 4 5 Vote Actual
## 1 8.676202e-12 9.391955e-05 9.990773e-01 0.309523649 4 4
## 2 1.203093e-09 5.697425e-04 3.337537e-02 0.909280801 5 5
## 3 1.313707e-11 3.694125e-03 1.124584e-02 0.999006164 5 5
## 4 3.869451e-12 7.847720e-01 4.353223e-01 0.270330036 3 4
## 5 6.351230e-10 1.471869e-06 5.405667e-03 0.999772328 5 4
## 6 5.117065e-14 3.921571e-07 1.803495e-03 0.999998872 5 4
## 7 3.190159e-10 9.619174e-04 9.947708e-02 0.996037399 5 5
## 8 5.423927e-11 7.320322e-04 1.180217e-01 0.996411145 5 3
## 9 2.214680e-16 1.800219e-08 1.929954e-03 0.999999491 5 5
## 10 1.065984e-10 8.646170e-04 7.072121e-02 0.997634968 5 3
## 11 5.667426e-14 9.845672e-07 4.227896e-02 0.999890076 5 4
## 12 2.522543e-14 8.345028e-02 8.942315e-04 0.998578262 5 3
## 13 1.596546e-12 2.663140e-07 4.017636e-03 0.999989319 5 5
## 14 2.217741e-12 5.158214e-03 1.764989e-02 0.998471738 5 4
## 15 2.941318e-13 7.276332e-05 9.301738e-01 0.931125617 5 4
## 16 1.204301e-14 9.564039e-01 1.706593e-06 0.999243534 5 5
## 17 9.946473e-17 5.625676e-09 3.105547e-03 0.999997813 5 4
## 18 1.893770e-12 3.684857e-05 9.780251e-03 0.999925505 5 5
## 19 3.812310e-14 2.980716e-04 6.360569e-01 0.944268231 5 4
## 20 2.185146e-15 3.942539e-08 1.632847e-01 0.999898479 5 2
## 21 1.631182e-13 7.225201e-01 9.348081e-01 0.014953521 4 4
## 22 1.112200e-14 2.147408e-04 9.385137e-01 0.816534603 4 4
## 23 4.546608e-11 1.588310e-04 1.632468e-01 0.997223763 5 3
## 24 1.551716e-16 4.868950e-02 4.330922e-02 0.980018114 5 5
## 25 8.220387e-14 2.290938e-06 8.145784e-04 0.999967320 5 4
## 26 2.287806e-14 3.964191e-06 9.651443e-06 0.999999919 5 5
## 27 9.954643e-17 1.489932e-06 3.184692e-04 0.999994747 5 4
## 28 1.176499e-12 1.932503e-04 4.510742e-01 0.993310170 5 5
## 29 9.292842e-15 2.247665e-03 9.694122e-01 0.304524251 4 3
## 30 5.657076e-10 1.553440e-03 1.157345e-01 0.994063380 5 3
## 31 4.960204e-17 2.936033e-07 1.690256e-06 0.999999954 5 5
## 32 1.137303e-16 1.216684e-09 9.287956e-01 0.998298630 5 5
## 33 3.265468e-16 9.852732e-03 1.597382e-01 0.973840683 5 5
## 34 7.768603e-11 1.918118e-03 1.673354e-01 0.992449116 5 3
## 35 7.859916e-12 1.295114e-05 1.405890e-01 0.990406881 5 4
## 36 6.125298e-14 7.950514e-04 1.202840e-03 0.999763483 5 4
## 37 4.963421e-11 2.208852e-04 4.076696e-01 0.992368057 5 5
## 38 6.188716e-14 1.711443e-04 7.394886e-01 0.985477459 5 3
## 39 2.471335e-16 1.725662e-08 6.018308e-07 0.999999999 5 5
## 40 9.480203e-12 1.176351e-05 8.264750e-02 0.997768541 5 3
## 41 6.151776e-17 1.044972e-09 6.308179e-09 1.000000000 5 5
## 42 4.209287e-13 8.812699e-01 6.241611e-02 0.629805677 3 4
## 43 5.449696e-11 1.789844e-04 9.931665e-01 0.569376178 4 5
## 44 1.843851e-14 1.761054e-04 9.999674e-01 0.004421912 4 3
## 45 8.943985e-14 5.079945e-04 1.222923e-06 0.999983308 5 5
## 46 5.855868e-13 1.293802e-04 8.135405e-03 0.999746264 5 4
## 47 2.741568e-12 3.504416e-03 8.426954e-03 0.994707065 5 5
## 48 1.850219e-14 7.503094e-09 1.038797e-02 0.999986406 5 3
## 49 9.163059e-18 8.343676e-09 2.529059e-06 0.999999947 5 5
## 50 1.310257e-14 1.598402e-04 2.756814e-02 0.999522484 5 5
## 51 3.677139e-13 8.113590e-06 5.722211e-03 0.999951596 5 4
## 52 4.918097e-14 7.411154e-07 6.173445e-02 0.999918144 5 5
## 53 1.341986e-12 2.869269e-05 7.340764e-01 0.993511225 5 4
## 54 1.284512e-11 1.746888e-04 4.290020e-01 0.993376854 5 4
## 55 5.811643e-12 1.089171e-03 1.837772e-03 0.999889574 5 4
## 56 7.419313e-12 1.042043e-05 1.130614e-02 0.999176215 5 5
## 57 2.434935e-12 5.312471e-05 3.393601e-02 0.999621942 5 2
## 58 6.538434e-17 7.671436e-09 1.365704e-01 0.999899182 5 5
## 59 5.435994e-13 4.765922e-06 8.852500e-03 0.999838734 5 4
## 60 2.262822e-12 1.699183e-04 5.468478e-01 0.983915717 5 4
## 61 8.321301e-14 7.809485e-04 4.824356e-04 0.999926472 5 5
## 62 2.401182e-14 1.557966e-03 6.731363e-04 0.999889227 5 5
## 63 1.932828e-14 3.183750e-06 5.868054e-02 0.999768671 5 4
## 64 2.863585e-16 2.345279e-05 8.475688e-02 0.998077613 5 5
## 65 3.716267e-18 9.589778e-09 3.523234e-07 0.999999998 5 5
## 66 5.464305e-12 3.421204e-08 5.673278e-05 0.999998505 5 5
## 67 5.271397e-14 1.199793e-04 3.336015e-03 0.999937860 5 3
## 68 2.029324e-12 5.515777e-04 7.076808e-02 0.995786544 5 3
## 69 1.044893e-16 3.515477e-06 8.439259e-01 0.990986670 5 4
## 70 4.800449e-13 6.084361e-04 2.330794e-02 0.999027995 5 4
## 71 1.231028e-13 3.203905e-07 2.276844e-02 0.999818914 5 5
## 72 3.052761e-15 1.143671e-09 6.226369e-04 0.999999333 5 5
## 73 2.866246e-10 8.408950e-05 2.248163e-01 0.988996782 5 2
## 74 2.594845e-10 9.032051e-05 2.089692e-01 0.989681854 5 4
## 75 3.034782e-10 5.621804e-06 9.418721e-02 0.901760559 5 4
## 76 8.970794e-15 3.819240e-03 9.538244e-01 0.176384048 4 5
## 77 3.665166e-15 5.549350e-07 9.999988e-01 0.006651782 4 3
## 78 4.521079e-13 7.181885e-08 3.100828e-01 0.998703133 5 4
## 79 8.880919e-11 7.334721e-04 8.686732e-02 0.997054511 5 5
## 80 1.154603e-19 2.765719e-08 8.835123e-02 0.999898988 5 5
## 81 6.139038e-16 1.163066e-07 2.228855e-03 0.999998459 5 5
## 82 6.285701e-16 1.406258e-08 2.217798e-06 0.999999979 5 4
## 83 9.957784e-01 5.418226e-05 9.664081e-04 0.075301756 2 5
## 84 6.335946e-19 5.885264e-09 9.275849e-05 0.999999953 5 5
## 85 5.646235e-16 3.705247e-06 2.049117e-05 0.999999660 5 5
## 86 1.842995e-03 5.237766e-02 6.140164e-04 0.061441477 5 3
## 87 1.374689e-14 1.229049e-03 1.212856e-06 0.999997654 5 4
## 88 3.109605e-11 1.233590e-04 9.981229e-01 0.350201554 4 5
## 89 6.596336e-18 1.423172e-08 9.999960e-01 0.109038596 4 5
## 90 2.767446e-14 4.630962e-07 2.417654e-04 0.999999220 5 3
## 91 1.098664e-13 4.878078e-03 8.886686e-03 0.987965101 5 3
## 92 2.406708e-15 9.902475e-05 9.995675e-01 0.009090852 4 5
## 93 1.952708e-13 4.523985e-06 9.961801e-01 0.703741702 4 4
## 94 2.894780e-12 1.276629e-05 9.944750e-01 0.519309180 4 5
## 95 8.428023e-14 3.133929e-04 1.519635e-02 0.999544154 5 5
## 96 1.135673e-13 9.877995e-06 9.996749e-01 0.117713707 4 5
## 97 1.288343e-13 2.182782e-05 2.145045e-02 0.999874487 5 5
## 98 1.060526e-17 3.658185e-02 3.936779e-01 0.851204546 5 5
## 99 2.749324e-09 7.603557e-06 3.938168e-05 0.999915391 5 2
## 100 1.550629e-13 9.922438e-04 1.612771e-01 0.993141047 5 5
#Confusion matrix: rows = actual class, columns = predicted class
CM <- table(Evaluation$Actual, Evaluation$Vote)
CM
##
## 2 3 4 5
## 2 0 0 1 6
## 3 0 1 3 21
## 4 0 2 11 49
## 5 1 0 12 101
#Class sizes in the test set (used as prevalence weights below)
Overall <- length(Evaluation$Actual)
Length2 <- length(which(Evaluation$Actual==2))
Length3 <- length(which(Evaluation$Actual==3))
Length4 <- length(which(Evaluation$Actual==4))
Length5 <- length(which(Evaluation$Actual==5))
#Accuracy
Accuracy <- sum(diag(CM))/sum(CM)
#Precision: per class, correct predictions over all predictions of that class
#(diagonal over column sums), then weighted by class prevalence in the test set
Precision <- diag(CM)/colSums(CM)
Precision <- sum(Precision * c(Length2, Length3, Length4, Length5))/Overall
#Recall: per class, correct predictions over all actual members of that class
#(diagonal over row sums), weighted the same way; prevalence-weighted recall
#reduces to accuracy by construction
Recall <- diag(CM)/rowSums(CM)
Recall <- sum(Recall * c(Length2, Length3, Length4, Length5))/Overall
Accuracy
## [1] 0.5432692
Precision
## [1] 0.4742473
Recall
## [1] 0.5432692
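#Macro-averaged F1 (sketch, output not shown): the unweighted mean of the
#per-class F1 scores, which exposes the weak minority classes more than the
#prevalence-weighted averages above; prec.k, rec.k, f1.k and MacroF1 are
#hypothetical names.
prec.k <- diag(CM)/colSums(CM)
rec.k <- diag(CM)/rowSums(CM)
f1.k <- ifelse(prec.k + rec.k > 0, 2*prec.k*rec.k/(prec.k + rec.k), 0)
MacroF1 <- mean(f1.k)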