1. Data Import

library(dslabs)
library(matrixStats)
library(tidyverse)
## -- Attaching packages ---------------------------------------------------------------------------------------------------------------- tidyverse 1.3.0 --
## v ggplot2 3.2.1     v purrr   0.3.3
## v tibble  2.1.3     v dplyr   0.8.3
## v tidyr   1.0.0     v stringr 1.4.0
## v readr   1.3.1     v forcats 0.4.0
## -- Conflicts ------------------------------------------------------------------------------------------------------------------- tidyverse_conflicts() --
## x dplyr::count()  masks matrixStats::count()
## x dplyr::filter() masks stats::filter()
## x dplyr::lag()    masks stats::lag()
library(GGally)
## Registered S3 method overwritten by 'GGally':
##   method from   
##   +.gg   ggplot2
## 
## Attaching package: 'GGally'
## The following object is masked from 'package:dplyr':
## 
##     nasa
library(caret)
## Loading required package: lattice
## 
## Attaching package: 'caret'
## The following object is masked from 'package:purrr':
## 
##     lift
library(class)
library(Rborist)
## Rborist 0.2-3
## Type RboristNews() to see new features/changes/bug fixes.
library(rafalib)

mnist <- read_mnist()

names(mnist)
## [1] "train" "test"
dim(mnist$train$images)
## [1] 60000   784
class(mnist$train$labels)
## [1] "integer"
table(mnist$train$labels)
## 
##    0    1    2    3    4    5    6    7    8    9 
## 5923 6742 5958 6131 5842 5421 5918 6265 5851 5949
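As a quick sanity check (an addition, not part of the original code), a single digit can be drawn straight from the pixel matrix, since each row stores a 28 x 28 image as a 784-long vector:

# sketch: display one training digit together with its label
i <- 4  # arbitrary example row
image(matrix(mnist$train$images[i, ], 28, 28)[, 28:1],
      main = paste("label:", mnist$train$labels[i]),
      xaxt = "n", yaxt = "n")
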
# sample 10k rows from training set, 1k rows from test set
set.seed(123)
index <- sample(nrow(mnist$train$images), 10000)
x <- mnist$train$images[index,]
y <- factor(mnist$train$labels[index])

index <- sample(nrow(mnist$test$images), 1000)
# note: the line above is the corrected code; the code in the video at 0:52 is incorrect
x_test <- mnist$test$images[index,]
y_test <- factor(mnist$test$labels[index])
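
A quick check, added here, that the subsample keeps the ten classes roughly balanced:

# each digit should appear with proportion close to 0.1
round(prop.table(table(y)), 2)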
2. Preprocessing
# column standard deviations: many border pixels barely vary
sds <- colSds(x)
qplot(sds, bins = 256, color = I('black'))

# drop near-zero-variance columns (mostly blank border pixels)
nzv <- nearZeroVar(x)
image(matrix(1:784 %in% nzv, 28, 28))

col_index <- setdiff(1:ncol(x), nzv)
length(col_index)
## [1] 249
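
For intuition (an illustrative sketch only; the cutoff of 10 is arbitrary, unlike nearZeroVar's frequency-based heuristics), the same filter can be approximated from the standard deviations computed above:

# keep columns whose pixel intensities actually vary
keep <- which(sds > 10)
length(keep)  # of the same order as length(col_index), though not identical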
3. KNN and Random Forest
colnames(x) <- 1:ncol(mnist$train$images)
colnames(x_test) <- colnames(x)

# full 10-fold cross-validation on all 10,000 rows is slow, so it is left
# commented out; the subsampled tuning below is used instead
# control <- trainControl(method = "cv", number = 10, p = .9)
# train_knn <- train(x[, col_index], y,
#                    method = "knn",
#                    tuneGrid = data.frame(k = c(1,3,5,7)),
#                    trControl = control)
# ggplot(train_knn)



# tune k by cross-validation on a small subsample to keep it fast
n <- 1000
b <- 2
index <- sample(nrow(x), n)
control <- trainControl(method = "cv", number = b, p = .9)
train_knn <- train(x[index, col_index], y[index],
                   method = "knn",
                   tuneGrid = data.frame(k = c(3,5,7)),
                   trControl = control)

# fit kNN on the full 10,000-row training set with k = 3
fit_knn <- knn3(x[, col_index], y, k = 3)
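
Before committing to k = 3, the cross-validation results can be inspected; a quick look, added here:

# resampling accuracy for each candidate k, and the winner
train_knn$results
train_knn$bestTune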


y_hat_knn <- predict(fit_knn,
                     x_test[, col_index],
                     type="class")
cm <- confusionMatrix(y_hat_knn, factor(y_test))
cm
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1   2   3   4   5   6   7   8   9
##          0 106   0   0   0   0   1   3   0   0   2
##          1   0 111   1   0   0   1   2   1   1   0
##          2   0   1  89   0   0   0   0   0   1   1
##          3   0   0   0 101   0   4   0   0   4   0
##          4   0   0   0   0  82   0   0   0   1   4
##          5   0   0   0   5   0  82   1   0   0   0
##          6   1   0   2   1   0   1 101   0   2   0
##          7   1   0   4   1   0   0   0  96   0   0
##          8   0   0   0   0   1   4   0   0  88   0
##          9   0   0   0   0   2   1   0   2   0  87
## 
## Overall Statistics
##                                           
##                Accuracy : 0.943           
##                  95% CI : (0.9268, 0.9565)
##     No Information Rate : 0.112           
##     P-Value [Acc > NIR] : < 2.2e-16       
##                                           
##                   Kappa : 0.9366          
##                                           
##  Mcnemar's Test P-Value : NA              
## 
## Statistics by Class:
## 
##                      Class: 0 Class: 1 Class: 2 Class: 3 Class: 4 Class: 5
## Sensitivity            0.9815   0.9911   0.9271   0.9352   0.9647   0.8723
## Specificity            0.9933   0.9932   0.9967   0.9910   0.9945   0.9934
## Pos Pred Value         0.9464   0.9487   0.9674   0.9266   0.9425   0.9318
## Neg Pred Value         0.9977   0.9989   0.9923   0.9921   0.9967   0.9868
## Prevalence             0.1080   0.1120   0.0960   0.1080   0.0850   0.0940
## Detection Rate         0.1060   0.1110   0.0890   0.1010   0.0820   0.0820
## Detection Prevalence   0.1120   0.1170   0.0920   0.1090   0.0870   0.0880
## Balanced Accuracy      0.9874   0.9922   0.9619   0.9631   0.9796   0.9329
##                      Class: 6 Class: 7 Class: 8 Class: 9
## Sensitivity            0.9439   0.9697   0.9072   0.9255
## Specificity            0.9922   0.9933   0.9945   0.9945
## Pos Pred Value         0.9352   0.9412   0.9462   0.9457
## Neg Pred Value         0.9933   0.9967   0.9901   0.9923
## Prevalence             0.1070   0.0990   0.0970   0.0940
## Detection Rate         0.1010   0.0960   0.0880   0.0870
## Detection Prevalence   0.1080   0.1020   0.0930   0.0920
## Balanced Accuracy      0.9680   0.9815   0.9508   0.9600
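Individual pieces of the confusionMatrix object can be pulled out directly, which is handy when comparing models later (my addition):

# overall accuracy and per-class sensitivity for the kNN fit
cm$overall["Accuracy"]
cm$byClass[, "Sensitivity"]
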
control <- trainControl(method = "cv", number = 5, p = 0.8)
grid <- expand.grid(minNode = c(1,5), predFixed = c(10, 15, 25, 35, 50))
# tune the random forest; nSamp = 5000 rows per tree keeps training fast
train_rf <- train(x[, col_index], y,
                  method = "Rborist",
                  nTree = 50,
                  trControl = control,
                  tuneGrid = grid,
                  nSamp = 5000)
ggplot(train_rf)

train_rf$bestTune
##   predFixed minNode
## 2        15       1
# refit with more trees using the best tuning values
fit_rf <- Rborist(x[, col_index], y,
                  nTree = 1000,
                  minNode = train_rf$bestTune$minNode,
                  predFixed = train_rf$bestTune$predFixed)

# Rborist predictions come back as integer codes in $yPred
y_hat_rf <- factor(levels(y)[predict(fit_rf, x_test[, col_index])$yPred])
cm <- confusionMatrix(y_hat_rf, y_test)
cm
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1   2   3   4   5   6   7   8   9
##          0 108   0   0   0   0   3   0   0   0   1
##          1   0 110   0   0   0   1   1   0   0   0
##          2   0   2  93   0   0   1   0   3   0   0
##          3   0   0   0 105   0   3   1   0   0   1
##          4   0   0   0   0  82   0   2   1   1   3
##          5   0   0   0   2   0  83   1   0   1   2
##          6   0   0   1   1   0   0 101   0   1   0
##          7   0   0   2   0   0   1   0  93   0   0
##          8   0   0   0   0   1   2   1   0  94   0
##          9   0   0   0   0   2   0   0   2   0  87
## 
## Overall Statistics
##                                           
##                Accuracy : 0.956           
##                  95% CI : (0.9414, 0.9679)
##     No Information Rate : 0.112           
##     P-Value [Acc > NIR] : < 2.2e-16       
##                                           
##                   Kappa : 0.9511          
##                                           
##  Mcnemar's Test P-Value : NA              
## 
## Statistics by Class:
## 
##                      Class: 0 Class: 1 Class: 2 Class: 3 Class: 4 Class: 5
## Sensitivity            1.0000   0.9821   0.9688   0.9722   0.9647   0.8830
## Specificity            0.9955   0.9977   0.9934   0.9944   0.9923   0.9934
## Pos Pred Value         0.9643   0.9821   0.9394   0.9545   0.9213   0.9326
## Neg Pred Value         1.0000   0.9977   0.9967   0.9966   0.9967   0.9879
## Prevalence             0.1080   0.1120   0.0960   0.1080   0.0850   0.0940
## Detection Rate         0.1080   0.1100   0.0930   0.1050   0.0820   0.0830
## Detection Prevalence   0.1120   0.1120   0.0990   0.1100   0.0890   0.0890
## Balanced Accuracy      0.9978   0.9899   0.9811   0.9833   0.9785   0.9382
##                      Class: 6 Class: 7 Class: 8 Class: 9
## Sensitivity            0.9439   0.9394   0.9691   0.9255
## Specificity            0.9966   0.9967   0.9956   0.9956
## Pos Pred Value         0.9712   0.9687   0.9592   0.9560
## Neg Pred Value         0.9933   0.9934   0.9967   0.9923
## Prevalence             0.1070   0.0990   0.0970   0.0940
## Detection Rate         0.1010   0.0930   0.0940   0.0870
## Detection Prevalence   0.1040   0.0960   0.0980   0.0910
## Balanced Accuracy      0.9703   0.9680   0.9823   0.9606
rafalib::mypar(3,4)
for(i in 1:12){
     image(matrix(x_test[i,], 28, 28)[, 28:1], 
           main = paste("Our prediction:", y_hat_rf[i]),
           xaxt="n", yaxt="n")
}

4. Most Important Variables

library(randomForest)
## randomForest 4.6-14
## Type rfNews() to see new features/changes/bug fixes.
## 
## Attaching package: 'randomForest'
## The following object is masked from 'package:dplyr':
## 
##     combine
## The following object is masked from 'package:ggplot2':
## 
##     margin
# note: `index` was overwritten above, so this refits on a 1,000-row subsample;
# randomForest is used here because it reports variable importance directly
x <- mnist$train$images[index,]
y <- factor(mnist$train$labels[index])
rf <- randomForest(x, y, ntree = 50)
imp <- importance(rf)
imp
##     MeanDecreaseGini
## 1         0.00000000
## 2         0.00000000
## ...
## 379      11.47782692
## ...
## 784       0.00000000
## (one row per pixel, 784 in all; border pixels are near zero while the
##  central pixels carry essentially all of the importance; output truncated)
image(matrix(imp, 28, 28))
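
The same information in tabular form (my addition): the pixels with the largest mean decrease in Gini impurity, matching the bright spots in the image above:

# ten most important pixels by mean decrease in Gini
head(sort(imp[, 1], decreasing = TRUE), 10)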

p_max <- predict(fit_knn, x_test[, col_index])
p_max <- apply(p_max, 1, max)
ind <- which(y_hat_knn != y_test)
ind <- ind[order(p_max[ind], decreasing = TRUE)]
rafalib::mypar(3,4)
for(i in ind[1:12]){
    image(matrix(x_test[i,], 28, 28)[, 28:1],
          main = paste0("Pr(", y_hat_knn[i], ")=", round(p_max[i], 2),
                        " but is a ", y_test[i]),
          xaxt = "n", yaxt = "n")
}

p_max <- predict(fit_rf, x_test[, col_index])$census
p_max <- p_max / rowSums(p_max)
p_max <- apply(p_max, 1, max)
ind <- which(y_hat_rf != y_test)
ind <- ind[order(p_max[ind], decreasing = TRUE)]
rafalib::mypar(3,4)
for(i in ind[1:12]){
    image(matrix(x_test[i,], 28, 28)[, 28:1],
          main = paste0("Pr(", y_hat_rf[i], ")=", round(p_max[i], 2),
                        " but is a ", y_test[i]),
          xaxt = "n", yaxt = "n")
}

5. Ensemble

# average the class probabilities from the random forest and kNN
p_rf <- predict(fit_rf, x_test[, col_index])$census
p_rf <- p_rf / rowSums(p_rf)   # convert vote counts to proportions
p_knn <- predict(fit_knn, x_test[, col_index])
p <- (p_rf + p_knn) / 2
y_pred <- factor(apply(p, 1, which.max) - 1)
confusionMatrix(y_pred, y_test)
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction   0   1   2   3   4   5   6   7   8   9
##          0 106   0   0   0   0   1   2   0   0   1
##          1   0 111   0   0   0   1   2   1   1   0
##          2   0   1  91   0   0   0   0   0   1   1
##          3   0   0   0 102   0   4   1   0   2   0
##          4   0   0   0   0  83   0   0   0   1   4
##          5   0   0   0   5   0  83   1   0   0   1
##          6   1   0   1   1   0   2 101   0   3   0
##          7   1   0   4   0   0   0   0  96   0   0
##          8   0   0   0   0   1   2   0   0  89   1
##          9   0   0   0   0   1   1   0   2   0  86
## 
## Overall Statistics
##                                           
##                Accuracy : 0.948           
##                  95% CI : (0.9324, 0.9609)
##     No Information Rate : 0.112           
##     P-Value [Acc > NIR] : < 2.2e-16       
##                                           
##                   Kappa : 0.9422          
##                                           
##  Mcnemar's Test P-Value : NA              
## 
## Statistics by Class:
## 
##                      Class: 0 Class: 1 Class: 2 Class: 3 Class: 4 Class: 5
## Sensitivity            0.9815   0.9911   0.9479   0.9444   0.9765   0.8830
## Specificity            0.9955   0.9944   0.9967   0.9922   0.9945   0.9923
## Pos Pred Value         0.9636   0.9569   0.9681   0.9358   0.9432   0.9222
## Neg Pred Value         0.9978   0.9989   0.9945   0.9933   0.9978   0.9879
## Prevalence             0.1080   0.1120   0.0960   0.1080   0.0850   0.0940
## Detection Rate         0.1060   0.1110   0.0910   0.1020   0.0830   0.0830
## Detection Prevalence   0.1100   0.1160   0.0940   0.1090   0.0880   0.0900
## Balanced Accuracy      0.9885   0.9927   0.9723   0.9683   0.9855   0.9376
##                      Class: 6 Class: 7 Class: 8 Class: 9
## Sensitivity            0.9439   0.9697   0.9175   0.9149
## Specificity            0.9910   0.9945   0.9956   0.9956
## Pos Pred Value         0.9266   0.9505   0.9570   0.9556
## Neg Pred Value         0.9933   0.9967   0.9912   0.9912
## Prevalence             0.1070   0.0990   0.0970   0.0940
## Detection Rate         0.1010   0.0960   0.0890   0.0860
## Detection Prevalence   0.1090   0.1010   0.0930   0.0900
## Balanced Accuracy      0.9675   0.9821   0.9565   0.9552
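Side by side (a quick check, added here), the test accuracy of each component and of the ensemble can be computed directly:

# kNN alone, random forest alone, and the averaged-probability ensemble
mean(y_hat_knn == y_test)
mean(y_hat_rf == y_test)
mean(y_pred == y_test)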

6. Ensemble with Multiple Models

models <- c("glm", "lda", "naive_bayes", "svmLinear", "knn", "gamLoess", "multinom", "qda", "rf", "adaboost")
set.seed(1) # use `set.seed(1, sample.kind = "Rounding")` in R 3.6 or later
data("mnist_27")
# train one caret model per method
fits <- lapply(models, function(model){
    print(model)
    train(y ~ ., method = model, data = mnist_27$train)
})
## [1] "glm"
## [1] "lda"
## [1] "naive_bayes"
## [1] "svmLinear"
## [1] "knn"
## [1] "gamLoess"
## Loading required package: gam
## Loading required package: splines
## Loading required package: foreach
## 
## Attaching package: 'foreach'
## The following objects are masked from 'package:purrr':
## 
##     accumulate, when
## Loaded gam 1.16.1
## (dozens of repeated warnings truncated: model.matrix.default reports
##  "non-list contrasts argument ignored" and gam.lo warns
##  "extrapolation not allowed with blending" for points beyond the
##  fitted range, once per cross-validation resample)
## [1] "multinom"
## # weights:  4 (3 variable)
## initial  value 554.517744
## iter  10 value 340.447413
## final  value 340.447361
## converged
## (similar nnet convergence logs for the remaining resamples omitted)
## [1] "qda"
## [1] "rf"
## note: only 1 unique complexity parameters in default grid. Truncating the grid to 1 .
## 
## [1] "adaboost"
names(fits) <- models

# matrix of predictions: one column per model, one row per test observation
pred <- sapply(fits, function(object)
    predict(object, newdata = mnist_27$test))
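
Each column of pred holds one model's test-set predictions, so per-model test accuracy follows directly (a quick check, added here):

# test accuracy of each individual model, and their average
acc <- colMeans(pred == mnist_27$test$y)
acc
mean(acc)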


# conservative accuracy estimate for each model from its resampling results
acc_hat <- sapply(fits, function(fit) min(fit$results$Accuracy))
M <- mean(acc_hat)   # mean of the training-set accuracy estimates

# keep only the models whose estimated accuracy is at least the training mean,
# then take a majority vote among them
ind <- acc_hat >= M
votes <- rowMeans(pred[, ind] == "7")
y_hat <- ifelse(votes >= 0.5, 7, 2) %>%
    factor(levels = levels(mnist_27$test$y))
mean(y_hat == mnist_27$test$y)
## [1] 0.815
confusionMatrix(data = y_hat, reference = mnist_27$test$y)   # confusion matrix
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction  2  7
##          2 89 20
##          7 17 74
##                                           
##                Accuracy : 0.815           
##                  95% CI : (0.7541, 0.8663)
##     No Information Rate : 0.53            
##     P-Value [Acc > NIR] : <2e-16          
##                                           
##                   Kappa : 0.628           
##                                           
##  Mcnemar's Test P-Value : 0.7423          
##                                           
##             Sensitivity : 0.8396          
##             Specificity : 0.7872          
##          Pos Pred Value : 0.8165          
##          Neg Pred Value : 0.8132          
##              Prevalence : 0.5300          
##          Detection Rate : 0.4450          
##    Detection Prevalence : 0.5450          
##       Balanced Accuracy : 0.8134          
##                                           
##        'Positive' Class : 2               
## 
data("tissue_gene_expression")
dim(tissue_gene_expression$x)
## [1] 189 500
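
For context (my addition), the outcome here is the tissue type of each sample:

# number of samples per tissue
table(tissue_gene_expression$y)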