setwd("~/Google Drive/UM/Smart Services/Thesis/Thesis/Code/Naive Bayes/2.Feature Set 1/TFIDF/50")
#install.packages("naivebayes")
library(naivebayes)
## Warning: package 'naivebayes' was built under R version 3.4.3
library(dplyr)
## Warning: Installed Rcpp (0.12.16) different from Rcpp used to build dplyr (0.12.11).
## Please reinstall dplyr to avoid random crashes or undefined behavior.
##
## Attaching package: 'dplyr'
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
library(psych)
library(ggplot2)
##
## Attaching package: 'ggplot2'
## The following objects are masked from 'package:psych':
##
## %+%, alpha
library(e1071)
library(readxl)
#Import Labels
Labels <- read_excel("~/Google Drive/UM/Smart Services/Thesis/Thesis/Code/Naive Bayes/1.Labels/Source Data.xlsx")
Label <- Labels$Score
#Import Features
Features <- read.csv("~/Google Drive/UM/Smart Services/Thesis/Thesis/Code/Naive Bayes/2.Feature Set 1/TFIDF/50/Feature Set 1: 50th TFIDF.csv")
#Drop the first (index) column
Features <- Features[-1]
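#Sanity check (a small addition, not part of the original pipeline): the
#feature matrix and the label vector should describe the same 1000 reviews
stopifnot(nrow(Features) == length(Label))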
#Class 2: scores 3-4
Label2 <- list()
for(i in seq_along(Label)){
  if(Label[i] == 3 | Label[i] == 4){
    Label2[i] <- 1
  }else{
    Label2[i] <- 0
  }
}
#As Factor
Label2 <- as.factor(unlist(Label2))
#Class 3: scores 5-6
Label3 <- list()
for(i in seq_along(Label)){
  if(Label[i] == 5 | Label[i] == 6){
    Label3[i] <- 1
  }else{
    Label3[i] <- 0
  }
}
#As Factor
Label3 <- as.factor(unlist(Label3))
#Class 4: scores 7-8
Label4 <- list()
for(i in seq_along(Label)){
  if(Label[i] == 7 | Label[i] == 8){
    Label4[i] <- 1
  }else{
    Label4[i] <- 0
  }
}
#As Factor
Label4 <- as.factor(unlist(Label4))
#Class 5: scores 9-10
Label5 <- list()
for(i in seq_along(Label)){
  if(Label[i] == 9 | Label[i] == 10){
    Label5[i] <- 1
  }else{
    Label5[i] <- 0
  }
}
#As Factor
Label5 <- as.factor(unlist(Label5))
#All Labels: collapse the 1-10 scores into four classes (the else branch folds scores 1-4 into class 2)
All <- list()
for(i in seq_along(Label)){
  if(Label[i] == 9 | Label[i] == 10){
    All[i] <- 5
  }else if(Label[i] == 7 | Label[i] == 8){
    All[i] <- 4
  }else if(Label[i] == 5 | Label[i] == 6){
    All[i] <- 3
  }else{
    All[i] <- 2
  }
}
#As Factor
All <- as.factor(unlist(All))
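#The loops above can also be written in vectorized form. A minimal sketch of an
#equivalent alternative (the `.alt` names are illustrative only and not used
#below): %in% yields TRUE/FALSE, which as.integer() maps to 1/0.
Label2.alt <- as.factor(as.integer(Label %in% c(3, 4)))
Label5.alt <- as.factor(as.integer(Label %in% c(9, 10)))
#ceiling(score/2) maps 5-6 to 3, 7-8 to 4, 9-10 to 5; pmax() folds 1-4 into class 2
All.alt <- as.factor(pmax(2, ceiling(Label / 2)))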
#Control data frame to verify the recoded labels against the original scores
Control.df <- data.frame(ID = seq_len(1000))
Control.df$Actual <- Label
Control.df$All <- All
Control.df$Label2 <- Label2
Control.df$Label3 <- Label3
Control.df$Label4 <- Label4
Control.df$Label5 <- Label5
Control.df[1:10,2:7]
## Actual All Label2 Label3 Label4 Label5
## 1 3 2 1 0 0 0
## 2 8 4 0 0 1 0
## 3 7 4 0 0 1 0
## 4 4 2 1 0 0 0
## 5 7 4 0 0 1 0
## 6 7 4 0 0 1 0
## 7 5 3 0 1 0 0
## 8 10 5 0 0 0 1
## 9 7 4 0 0 1 0
## 10 8 4 0 0 1 0
#Transform every TF-IDF column from numeric to factor so that naiveBayes()
#treats the values as discrete levels rather than fitting Gaussians
for(i in seq_len(ncol(Features))){
  Features[,i] <- as.factor(Features[,i])
}
str(Features)
## 'data.frame': 1000 obs. of 1336 variables:
## $ abl : Factor w/ 13 levels "0","0.0350594603513238",..: 2 1 1 1 1 1 1 1 1 1 ...
## $ about : Factor w/ 2 levels "0","0.71184173461872": 1 1 1 1 1 1 1 1 1 1 ...
## $ absolut : Factor w/ 10 levels "0","0.0363483389163396",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ accent : Factor w/ 3 levels "0","0.373574345194254",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ accept : Factor w/ 4 levels "0","0.133028917205412",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ access : Factor w/ 29 levels "0","0.0374430565263987",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ accommod : Factor w/ 15 levels "0","0.0542344025878372",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ accomplish : Factor w/ 2 levels "0","0.586222604980123": 1 1 1 1 1 1 1 1 1 1 ...
## $ across : Factor w/ 4 levels "0","0.100973756433023",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ actual : Factor w/ 10 levels "0","0.0365047043394216",..: 1 1 1 1 1 1 1 1 10 1 ...
## $ adequ : Factor w/ 7 levels "0","0.127829095760794",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ adjac : Factor w/ 5 levels "0","0.0926253986588615",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ ador : Factor w/ 2 levels "0","0.71184173461872": 1 1 1 1 1 1 1 1 1 1 ...
## $ adult : Factor w/ 4 levels "0","0.152378577889835",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ advanc : Factor w/ 7 levels "0","0.0441965376283888",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ adverti : Factor w/ 8 levels "0","0.0522513092160911",..: 1 1 1 1 3 1 1 1 1 1 ...
## $ advi : Factor w/ 4 levels "0","0.0805848248455859",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ advic : Factor w/ 7 levels "0","0.0709694402302013",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ affect : Factor w/ 6 levels "0","0.0920946528888521",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ after : Factor w/ 6 levels "0","0.112409649849628",..: 1 1 1 1 2 1 1 1 1 1 ...
## $ ago : Factor w/ 3 levels "0","0.25616526527606",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ ahead : Factor w/ 2 levels "0","0.664385618977473": 1 1 1 1 1 1 1 1 1 1 ...
## $ air : Factor w/ 23 levels "0","0.0694786237666482",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ aircon : Factor w/ 3 levels "0","0.320206581595075",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ aircondit : Factor w/ 5 levels "0","0.177017428548046",..: 1 1 1 1 1 1 1 1 1 3 ...
## $ airi : Factor w/ 4 levels "0","0.0805848248455859",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ airport : Factor w/ 18 levels "0","0.0323791021408926",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ alarm : Factor w/ 4 levels "0","0.209520544598523",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ albert : Factor w/ 3 levels "0","0.119543790462161",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ albrt : Factor w/ 2 levels "0","0.586222604980123": 1 1 1 1 1 1 1 1 1 1 ...
## $ aldo : Factor w/ 2 levels "0","0.766598791127853": 1 1 1 1 1 1 1 1 1 1 ...
## $ alittl : Factor w/ 2 levels "0","0.905980389514735": 1 1 1 1 1 1 1 1 1 1 ...
## $ all : Factor w/ 15 levels "0","0.0441909947012006",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ alloc : Factor w/ 5 levels "0","0.26552614282207",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ allow : Factor w/ 11 levels "0","0.0962877708663004",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ alreadi : Factor w/ 7 levels "0","0.0620237124700919",..: 1 1 1 2 1 1 1 1 1 1 ...
## $ also : Factor w/ 56 levels "0","0.0203241634283911",..: 1 1 1 31 1 1 41 1 50 1 ...
## $ altern : Factor w/ 4 levels "0","0.126982148241529",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ although : Factor w/ 22 levels "0","0.0505889368905357",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ alway : Factor w/ 19 levels "0","0.0418063421464794",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ amaz : Factor w/ 32 levels "0","0.0329053505822598",..: 1 1 1 1 7 1 1 1 1 1 ...
## $ ambianc : Factor w/ 2 levels "0","0.996578428466209": 1 1 1 1 1 1 1 1 1 1 ...
## $ amen : Factor w/ 9 levels "0","0.0703614574208292",..: 1 3 1 1 1 1 1 1 1 1 ...
## $ american : Factor w/ 4 levels "0","0.074828765928044",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ amount : Factor w/ 3 levels "0","0.0536873310458808",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ amsterdam : Factor w/ 22 levels "0","0.0329721716528431",..: 1 6 1 4 1 1 1 1 1 1 ...
## $ and : Factor w/ 18 levels "0","0.0310101988449161",..: 2 1 1 1 1 1 1 1 1 1 ...
## $ anna : Factor w/ 3 levels "0","0.640413163190149",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ annoy : Factor w/ 10 levels "0","0.0357491904726637",..: 2 1 1 1 1 1 1 1 1 1 ...
## $ anoth : Factor w/ 15 levels "0","0.0368768225305658",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ answer : Factor w/ 7 levels "0","0.054672753955118",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ ant : Factor w/ 2 levels "0","1.99315685693242": 1 1 1 1 1 1 1 1 1 1 ...
## $ anymor : Factor w/ 6 levels "0","0.0419992098339271",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ anyon : Factor w/ 11 levels "0","0.067109658482573",..: 1 5 1 1 1 1 1 1 1 1 ...
## $ anyth : Factor w/ 14 levels "0","0.0368768225305658",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ anyway : Factor w/ 6 levels "0","0.0457715939507469",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ anywh : Factor w/ 4 levels "0","0.0838082178394093",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ apart : Factor w/ 9 levels "0","0.0557262742772967",..: 1 1 1 1 1 1 1 7 1 1 ...
## $ apolog : Factor w/ 4 levels "0","0.0997716879040587",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ appal : Factor w/ 3 levels "0","0.640413163190149",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ appeal : Factor w/ 3 levels "0","0.249049563462836",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ appear : Factor w/ 6 levels "0","0.0955482023721841",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ applic : Factor w/ 2 levels "0","1.10730936496245": 1 1 1 1 1 1 1 1 1 1 ...
## $ appoint : Factor w/ 5 levels "0","0.221271785685058",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ appreci : Factor w/ 7 levels "0","0.0688310515635046",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ approach : Factor w/ 4 levels "0","0.174600453832103",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ april : Factor w/ 2 levels "0","0.830482023721841": 1 1 1 1 1 1 1 1 1 1 ...
## $ architectur : Factor w/ 7 levels "0","0.111830633090014",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ area : Factor w/ 61 levels "0","0.0271674294106491",..: 1 1 1 3 1 1 1 1 1 1 ...
## $ arena : Factor w/ 11 levels "0","0.0472653465477106",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ aroom : Factor w/ 2 levels "0","0.71184173461872": 1 1 1 1 1 1 1 1 1 1 ...
## $ around : Factor w/ 41 levels "0","0.0246435499049411",..: 1 1 1 4 1 1 1 1 1 1 ...
## $ arrang : Factor w/ 4 levels "0","0.155200403406314",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ arriv : Factor w/ 44 levels "0","0.0239070024205872",..: 2 1 1 1 13 1 1 1 1 1 ...
## $ art : Factor w/ 3 levels "0","0.373574345194254",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ artwork : Factor w/ 2 levels "0","0.62286151779138": 1 1 1 1 1 1 1 1 1 1 ...
## $ ask : Factor w/ 36 levels "0","0.0239070024205872",..: 2 1 22 1 1 1 1 1 1 1 ...
## $ aspect : Factor w/ 6 levels "0","0.0682487159801315",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ athmosph : Factor w/ 2 levels "0","1.66096404744368": 1 1 1 1 1 1 1 1 1 1 ...
## $ atm : Factor w/ 5 levels "0","0.0711230739701972",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ atmosph : Factor w/ 12 levels "0","0.100097733323458",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ attend : Factor w/ 4 levels "0","0.178315357105126",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ attent : Factor w/ 13 levels "0","0.0634889625010771",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ attic : Factor w/ 5 levels "0","0.0995723035582761",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ attitud : Factor w/ 4 levels "0","0.147031961121771",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ attract : Factor w/ 11 levels "0","0.147641248661661",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ atttent : Factor w/ 2 levels "0","1.24572303558276": 1 1 1 1 1 1 1 1 1 1 ...
## $ avail : Factor w/ 19 levels "0","0.0310101988449161",..: 2 1 1 1 1 1 1 1 1 1 ...
## $ averag : Factor w/ 7 levels "0","0.137662103127009",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ awar : Factor w/ 6 levels "0","0.107659946334855",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ away : Factor w/ 25 levels "0","0.0677544644918298",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ awesom : Factor w/ 8 levels "0","0.0715842936260448",..: 1 1 1 1 1 4 1 1 1 1 ...
## $ back : Factor w/ 34 levels "0","0.0249424149997802",..: 1 8 19 1 1 1 1 1 1 1 ...
## $ backyard : Factor w/ 3 levels "0","0.407535649302822",..: 1 1 1 1 1 2 1 1 1 1 ...
## $ bacon : Factor w/ 11 levels "0","0.0603986926343157",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ bad : Factor w/ 29 levels "0","0.0272845290366049",..: 1 1 1 5 1 1 1 1 1 1 ...
## $ bag : Factor w/ 9 levels "0","0.0669786950448278",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ bake : Factor w/ 4 levels "0","0.186240484087576",..: 1 1 1 1 1 1 1 1 1 1 ...
## $ bang : Factor w/ 6 levels "0","0.0955482023721841",..: 1 1 1 1 1 1 1 1 1 1 ...
## [list output truncated]
#Train/Test Split (approximately 80/20)
set.seed(1234)
ind <- sample(2, nrow(Features), replace = TRUE, prob = c(0.8, 0.2))
train <- Features[ind == 1,]
test <- Features[ind == 2,]
train.labels.2 <- Label2[ind == 1]
test.labels.2 <- Label2[ind == 2]
train.labels.3 <- Label3[ind == 1]
test.labels.3 <- Label3[ind == 2]
train.labels.4 <- Label4[ind == 1]
test.labels.4 <- Label4[ind == 2]
train.labels.5 <- Label5[ind == 1]
test.labels.5 <- Label5[ind == 2]
train.labels <- All[ind == 1]
test.labels <- All[ind == 2]
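#Note: sample() yields a random, only approximately 80/20 split and does not
#stratify by class. A stratified alternative would look like the sketch below
#(it assumes the caret package is installed; not used in the rest of this script):
#library(caret)
#idx <- createDataPartition(All, p = 0.8, list = FALSE)
#train <- Features[idx,]; test <- Features[-idx,]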
head(data.frame(train.labels, train.labels.2, train.labels.3, train.labels.4, train.labels.5))
##   train.labels train.labels.2 train.labels.3 train.labels.4 train.labels.5
## 1            2              1              0              0              0
## 2            4              0              0              1              0
## 3            4              0              0              1              0
## 4            2              1              0              0              0
## 5            4              0              0              1              0
## 6            3              0              1              0              0
#Class distribution across all 1000 reviews
table(All)
## All
## 2 3 4 5
## 34 112 309 545
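#The classes are heavily imbalanced: class 5 alone covers more than half of the
#reviews, which is worth keeping in mind when reading the confusion matrix below.
#Relative frequencies for reference:
prop.table(table(All))
## All
##     2     3     4     5
## 0.034 0.112 0.309 0.545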
#One-vs-rest naive Bayes: one binary model per class (naiveBayes() from e1071)
NB2 <- naiveBayes(x = train, y = train.labels.2)
NB3 <- naiveBayes(x = train, y = train.labels.3)
NB4 <- naiveBayes(x = train, y = train.labels.4)
NB5 <- naiveBayes(x = train, y = train.labels.5)
#Posterior probabilities for each binary model
NB.Pred2 <- predict(NB2, test, type = "raw")
NB.Pred3 <- predict(NB3, test, type = "raw")
NB.Pred4 <- predict(NB4, test, type = "raw")
NB.Pred5 <- predict(NB5, test, type = "raw")
Voting.df <- data.frame(NB.Pred2, NB.Pred3, NB.Pred4, NB.Pred5)
colnames(Voting.df) <- c("Class 2: 0","Class 2: 1","Class 3: 0","Class 3: 1","Class 4: 0","Class 4: 1","Class 5: 0","Class 5: 1")
head(Voting.df)
## Class 2: 0 Class 2: 1 Class 3: 0 Class 3: 1 Class 4: 0 Class 4: 1
## 1 1 2.656601e-10 0.99958992 4.100755e-04 0.002914419 0.997085581
## 2 1 3.027672e-08 0.99826258 1.737416e-03 0.957187151 0.042812849
## 3 1 3.314706e-10 0.98891826 1.108174e-02 0.985427503 0.014572497
## 4 1 9.377100e-11 0.08231858 9.176814e-01 0.499875127 0.500124873
## 5 1 1.598334e-08 0.99999551 4.493665e-06 0.993008581 0.006991419
## 6 1 9.877070e-13 0.99999916 8.355598e-07 0.998728876 0.001271124
## Class 5: 0 Class 5: 1
## 1 6.985307e-01 0.3014693
## 2 2.259405e-01 0.7740595
## 3 2.900733e-03 0.9970993
## 4 8.873229e-01 0.1126771
## 5 6.657917e-04 0.9993342
## 6 1.414472e-06 0.9999986
#Keep only the positive-class ("1") probability from each binary model
Transformed.Voting.df <- Voting.df[seq(2, 8, 2)]
colnames(Transformed.Voting.df) <- c("2", "3", "4", "5")
head(Transformed.Voting.df)
## 2 3 4 5
## 1 2.656601e-10 4.100755e-04 0.997085581 0.3014693
## 2 3.027672e-08 1.737416e-03 0.042812849 0.7740595
## 3 3.314706e-10 1.108174e-02 0.014572497 0.9970993
## 4 9.377100e-11 9.176814e-01 0.500124873 0.1126771
## 5 1.598334e-08 4.493665e-06 0.006991419 0.9993342
## 6 9.877070e-13 8.355598e-07 0.001271124 0.9999986
Evaluation <- Transformed.Voting.df
#Vote: the class whose binary model gives the highest positive probability;
#which.max() returns an index 1-4, so add 1 to map back to classes 2-5
Index <- as.numeric(apply(Transformed.Voting.df, MARGIN = 1, which.max))
Index <- Index + 1
Evaluation$Vote <- Index
Evaluation$Actual <- test.labels
head(Evaluation,100)
## 2 3 4 5 Vote Actual
## 1 2.656601e-10 4.100755e-04 9.970856e-01 0.301469256 4 4
## 2 3.027672e-08 1.737416e-03 4.281285e-02 0.774059481 5 5
## 3 3.314706e-10 1.108174e-02 1.457250e-02 0.997099267 5 5
## 4 9.377100e-11 9.176814e-01 5.001249e-01 0.112677092 3 4
## 5 1.598334e-08 4.493665e-06 6.991419e-03 0.999334208 5 4
## 6 9.877070e-13 8.355598e-07 1.271124e-03 0.999998586 5 4
## 7 7.513113e-09 2.930405e-03 1.263555e-01 0.988488685 5 5
## 8 1.366758e-09 2.234735e-03 1.472262e-01 0.989601005 5 3
## 9 5.573404e-15 5.496146e-08 2.498668e-03 0.999998511 5 5
## 10 2.593433e-09 2.650005e-03 8.985755e-02 0.993118303 5 3
## 11 1.426249e-12 3.005920e-06 5.409320e-02 0.999678472 5 4
## 12 6.113047e-13 2.177534e-01 1.160222e-03 0.995863327 5 3
## 13 4.017826e-11 8.130675e-07 5.198338e-03 0.999968753 5 5
## 14 5.374400e-11 1.560501e-02 2.278621e-02 0.995554293 5 4
## 15 7.411730e-12 2.195639e-04 9.453202e-01 0.822499700 4 4
## 16 3.038650e-13 9.853303e-01 2.205843e-06 0.997791077 5 5
## 17 2.506379e-15 1.697801e-08 4.026645e-03 0.999993620 5 4
## 18 3.177208e-11 7.499731e-05 8.457607e-03 0.999854713 5 5
## 19 9.593959e-13 9.094688e-04 6.936251e-01 0.852752176 5 4
## 20 5.499081e-14 1.203674e-07 2.017879e-01 0.999703045 5 2
## 21 3.968502e-12 8.875383e-01 9.486989e-01 0.005219445 4 4
## 22 2.802597e-13 6.562542e-04 9.519445e-01 0.602815995 4 4
## 23 1.067966e-09 4.770582e-04 2.019171e-01 0.992034981 5 3
## 24 3.905008e-15 1.351421e-01 5.539470e-02 0.943706541 5 5
## 25 2.068721e-12 6.994298e-06 1.054964e-03 0.999904396 5 4
## 26 5.757432e-13 1.210275e-05 1.250259e-05 0.999999762 5 5
## 27 2.511721e-15 4.561743e-06 4.091790e-04 0.999984718 5 4
## 28 2.960746e-11 5.897675e-04 5.156199e-01 0.980676989 5 5
## 29 2.174274e-13 6.781334e-03 9.762555e-01 0.131444133 4 3
## 30 1.387161e-08 4.672608e-03 1.450513e-01 0.982952014 5 3
## 31 1.248272e-15 8.963828e-07 2.189582e-06 0.999999866 5 5
## 32 2.862108e-15 3.714588e-09 9.441263e-01 0.995038684 5 5
## 33 8.217790e-15 2.948440e-02 1.976027e-01 0.927137942 5 5
## 34 1.960147e-09 5.774609e-03 2.071606e-01 0.978235158 5 3
## 35 1.978006e-10 3.953934e-05 1.748589e-01 0.972443186 5 4
## 36 1.541476e-12 2.423370e-03 1.557624e-03 0.999308356 5 4
## 37 1.250716e-09 6.750232e-04 4.703267e-01 0.978054807 5 5
## 38 1.505652e-12 5.118462e-04 7.864402e-01 0.959106663 5 3
## 39 6.219297e-15 5.268521e-08 7.796208e-07 0.999999998 5 5
## 40 2.309465e-10 3.570318e-05 1.042687e-01 0.993556081 5 3
## 41 1.548140e-15 3.190345e-09 8.171712e-09 1.000000000 5 5
## 42 1.063465e-11 9.579086e-01 7.893216e-02 0.368445357 3 4
## 43 1.382267e-09 5.509181e-04 9.947125e-01 0.310465718 4 5
## 44 4.646259e-13 5.312884e-04 9.999749e-01 0.001520058 4 3
## 45 2.250819e-12 1.549313e-03 1.584193e-06 0.999951166 5 5
## 46 1.473672e-11 3.948985e-04 1.051346e-02 0.999258026 5 4
## 47 6.899357e-11 1.062270e-02 1.088930e-02 0.984671096 5 5
## 48 4.656211e-13 2.290727e-08 1.341556e-02 0.999960230 5 3
## 49 2.305951e-16 2.547360e-08 3.276179e-06 0.999999845 5 5
## 50 3.297358e-13 4.878389e-04 3.542370e-02 0.998604250 5 5
## 51 9.253791e-12 2.477073e-05 7.400131e-03 0.999858401 5 4
## 52 1.237675e-12 2.262654e-06 7.853942e-02 0.999760558 5 5
## 53 3.377206e-11 8.759486e-05 7.814669e-01 0.981250553 5 4
## 54 3.232568e-10 5.331411e-04 4.932273e-01 0.980867177 5 4
## 55 1.462543e-10 3.317867e-03 2.379387e-03 0.999677003 5 4
## 56 1.867125e-10 3.181334e-05 1.459739e-02 0.997593732 5 5
## 57 6.127693e-11 1.621743e-04 4.352489e-02 0.998894750 5 2
## 58 1.645445e-15 2.342122e-08 1.700546e-01 0.999705102 5 5
## 59 1.368008e-11 1.455042e-05 1.143775e-02 0.999528342 5 4
## 60 5.694557e-11 5.185868e-04 6.098719e-01 0.954357091 5 4
## 61 2.094117e-12 2.380453e-03 6.248655e-04 0.999784914 5 5
## 62 6.042752e-13 4.741370e-03 8.718178e-04 0.999675989 5 5
## 63 4.864104e-13 9.720062e-06 7.472039e-02 0.999323518 5 4
## 64 7.206423e-15 7.159892e-05 1.071134e-01 0.994396561 5 5
## 65 9.352260e-17 2.927800e-08 4.564051e-07 0.999999993 5 5
## 66 1.375132e-10 1.044508e-07 7.349129e-05 0.999995627 5 5
## 67 1.326586e-12 3.662118e-04 4.317271e-03 0.999818224 5 3
## 68 5.106944e-11 1.682085e-03 8.979678e-02 0.987772219 5 3
## 69 2.629551e-15 1.073282e-05 8.750716e-01 0.974080250 5 4
## 70 1.208068e-11 1.855264e-03 2.998699e-02 0.997161591 5 4
## 71 3.097973e-12 9.781653e-07 2.929753e-02 0.999470394 5 5
## 72 7.682498e-14 3.491678e-09 8.064251e-04 0.999998050 5 5
## 73 7.213120e-09 2.566845e-04 2.730933e-01 0.968476633 5 2
## 74 6.530119e-09 2.757013e-04 2.549625e-01 0.970401074 5 4
## 75 7.637253e-09 1.716341e-05 1.187086e-01 0.758309432 5 4
## 76 2.257566e-13 1.156959e-02 9.639752e-01 0.068207944 4 5
## 77 9.235732e-14 1.696645e-06 9.999990e-01 0.002289955 4 3
## 78 1.095623e-11 2.195774e-07 3.684053e-01 0.996225749 5 4
## 79 2.166294e-09 2.169098e-03 1.096392e-01 0.991571514 5 5
## 80 2.905644e-18 8.443860e-08 1.115403e-01 0.999704535 5 5
## 81 1.544934e-14 3.550888e-07 2.885393e-03 0.999995493 5 5
## 82 1.581843e-14 4.293367e-08 2.872968e-06 0.999999938 5 4
## 83 9.998324e-01 1.663443e-04 1.238672e-03 0.027239616 2 5
## 84 1.594487e-17 1.796796e-08 1.201575e-04 0.999999862 5 5
## 85 1.422776e-14 1.132826e-05 2.659321e-05 0.999999002 5 5
## 86 4.468122e-02 1.452645e-01 7.931956e-04 0.021855840 3 3
## 87 3.464035e-13 3.748193e-03 1.564791e-06 0.999993155 5 4
## 88 7.846061e-10 3.727272e-04 9.985471e-01 0.156288652 4 5
## 89 1.660016e-16 4.345004e-08 9.999969e-01 0.040151826 4 5
## 90 6.715316e-13 1.417874e-06 3.143174e-04 0.999997718 5 3
## 91 2.768488e-12 1.476594e-02 1.150267e-02 0.965510627 5 3
## 92 6.056657e-14 3.022654e-04 9.996661e-01 0.003126025 4 5
## 93 4.750743e-12 1.389049e-05 9.970529e-01 0.448328579 4 4
## 94 6.817468e-11 3.846472e-05 9.957247e-01 0.272606560 4 5
## 95 2.120974e-12 9.561868e-04 1.959761e-02 0.998667535 5 5
## 96 2.858004e-12 3.015733e-05 9.997490e-01 0.043614595 4 5
## 97 3.242211e-12 6.663827e-05 2.761228e-02 0.999632884 5 5
## 98 2.573407e-16 1.041484e-01 4.562950e-01 0.662872115 5 5
## 99 6.918874e-08 2.321362e-05 5.101505e-05 0.999752507 5 2
## 100 3.902271e-12 3.023204e-03 1.994198e-01 0.980194859 5 5
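#The argmax vote above can be packaged as a small helper for reuse (a sketch;
#vote_class is an illustrative name, not used elsewhere). Note that the four
#probabilities come from independently trained binary models, so the comparison
#is a heuristic rather than an argmax over one calibrated distribution.
vote_class <- function(prob.df, classes = c(2, 3, 4, 5)){
  classes[apply(prob.df, MARGIN = 1, which.max)]
}
#vote_class(Transformed.Voting.df) reproduces Evaluation$Vote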
#Confusion matrix: rows = actual class, columns = predicted class
CM <- table(Evaluation$Actual, Evaluation$Vote)
CM
##
## 2 3 4 5
## 2 0 0 1 6
## 3 0 3 3 19
## 4 0 2 13 47
## 5 1 0 12 101
#Proportions: actual class sizes in the test set
Overall <- length(Evaluation$Actual)
Length2 <- length(which(Evaluation$Actual == 2))
Length3 <- length(which(Evaluation$Actual == 3))
Length4 <- length(which(Evaluation$Actual == 4))
Length5 <- length(which(Evaluation$Actual == 5))
#Accuracy
Accuracy <- sum(diag(CM)) / sum(CM)
#Precision: diagonal over column sums, since columns hold the predictions;
#weighted by the actual class sizes
Precision <- diag(CM) / colSums(CM)
Precision <- (Precision[1]*Length2 + Precision[2]*Length3 + Precision[3]*Length4 + Precision[4]*Length5) / Overall
#Recall: diagonal over row sums, since rows hold the actual labels;
#weighted by the actual class sizes
Recall <- diag(CM) / rowSums(CM)
Recall <- (Recall[1]*Length2 + Recall[2]*Length3 + Recall[3]*Length4 + Recall[4]*Length5) / Overall
Accuracy
## [1] 0.5625
Precision
## 2
## 0.5257116
Recall
## 2
## 0.5625
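#For completeness, a weighted F1 score can be derived from the same per-class
#quantities (a sketch under the definitions above; class 2 is never predicted
#correctly, so its F1 is NaN and na.rm = TRUE lets it contribute zero):
Prec.class <- diag(CM) / colSums(CM)
Rec.class <- diag(CM) / rowSums(CM)
F1.class <- 2 * Prec.class * Rec.class / (Prec.class + Rec.class)
F1 <- sum(F1.class * c(Length2, Length3, Length4, Length5), na.rm = TRUE) / Overall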