Brayan Ivan Cruz Corona

13001595

Fiabilidad 2018

8.3 Lab: Decision Trees

8.3.1 Fitting Classification Trees

We load the necessary libraries and create the binary variable High, which is "Yes" when Sales exceeds 8 (thousand units) and "No" otherwise:

library(tree)
library(ISLR)
attach(Carseats)
High = ifelse(Sales <= 8, "No", "Yes")
Carseats = data.frame(Carseats, High)
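
As a quick sanity check (a small sketch, not part of the original lab), tabulating High reproduces the class balance that appears at the root node of the tree below (59% No, 41% Yes of the 400 stores):

table(High)
# expected: No 236, Yes 164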

We fit a classification tree with the tree() function, excluding Sales from the predictors since High was derived from it:

tree.carseats = tree(High ~ . - Sales, Carseats)
summary(tree.carseats)

Classification tree:
tree(formula = High ~ . - Sales, data = Carseats)
Variables actually used in tree construction:
[1] "ShelveLoc"   "Price"       "Income"     
[4] "CompPrice"   "Population"  "Advertising"
[7] "Age"         "US"         
Number of terminal nodes:  27 
Residual mean deviance:  0.4575 = 170.7 / 373 
Misclassification error rate: 0.09 = 36 / 400 
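
For reference (following ISLR), the deviance reported here is

$$-2 \sum_m \sum_k n_{mk} \log \hat{p}_{mk},$$

where $n_{mk}$ is the number of observations in the $m$th terminal node that belong to the $k$th class. The residual mean deviance divides this by $n - |T_0| = 400 - 27 = 373$, giving the 170.7/373 = 0.4575 shown above; a small deviance indicates a tree that fits the training data well.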

We plot tree.carseats; the argument pretty = 0 tells text() to display the category names of qualitative predictors rather than single letters:

plot(tree.carseats)
text(tree.carseats, pretty = 0)

Printing the tree object lists every branch; branches that lead to terminal nodes are marked with an asterisk (*). The very first split is on ShelveLoc, suggesting shelving location is the most important predictor of high sales.

tree.carseats
node), split, n, deviance, yval, (yprob)
      * denotes terminal node

  1) root 400 541.500 No ( 0.59000 0.41000 )  
    2) ShelveLoc: Bad,Medium 315 390.600 No ( 0.68889 0.31111 )  
      4) Price < 92.5 46  56.530 Yes ( 0.30435 0.69565 )  
        8) Income < 57 10  12.220 No ( 0.70000 0.30000 )  
         16) CompPrice < 110.5 5   0.000 No ( 1.00000 0.00000 ) *
         17) CompPrice > 110.5 5   6.730 Yes ( 0.40000 0.60000 ) *
        9) Income > 57 36  35.470 Yes ( 0.19444 0.80556 )  
         18) Population < 207.5 16  21.170 Yes ( 0.37500 0.62500 ) *
         19) Population > 207.5 20   7.941 Yes ( 0.05000 0.95000 ) *
      5) Price > 92.5 269 299.800 No ( 0.75465 0.24535 )  
       10) Advertising < 13.5 224 213.200 No ( 0.81696 0.18304 )  
         20) CompPrice < 124.5 96  44.890 No ( 0.93750 0.06250 )  
           40) Price < 106.5 38  33.150 No ( 0.84211 0.15789 )  
             80) Population < 177 12  16.300 No ( 0.58333 0.41667 )  
              160) Income < 60.5 6   0.000 No ( 1.00000 0.00000 ) *
              161) Income > 60.5 6   5.407 Yes ( 0.16667 0.83333 ) *
             81) Population > 177 26   8.477 No ( 0.96154 0.03846 ) *
           41) Price > 106.5 58   0.000 No ( 1.00000 0.00000 ) *
         21) CompPrice > 124.5 128 150.200 No ( 0.72656 0.27344 )  
           42) Price < 122.5 51  70.680 Yes ( 0.49020 0.50980 )  
             84) ShelveLoc: Bad 11   6.702 No ( 0.90909 0.09091 ) *
             85) ShelveLoc: Medium 40  52.930 Yes ( 0.37500 0.62500 )  
              170) Price < 109.5 16   7.481 Yes ( 0.06250 0.93750 ) *
              171) Price > 109.5 24  32.600 No ( 0.58333 0.41667 )  
                342) Age < 49.5 13  16.050 Yes ( 0.30769 0.69231 ) *
                343) Age > 49.5 11   6.702 No ( 0.90909 0.09091 ) *
           43) Price > 122.5 77  55.540 No ( 0.88312 0.11688 )  
             86) CompPrice < 147.5 58  17.400 No ( 0.96552 0.03448 ) *
             87) CompPrice > 147.5 19  25.010 No ( 0.63158 0.36842 )  
              174) Price < 147 12  16.300 Yes ( 0.41667 0.58333 )  
                348) CompPrice < 152.5 7   5.742 Yes ( 0.14286 0.85714 ) *
                349) CompPrice > 152.5 5   5.004 No ( 0.80000 0.20000 ) *
              175) Price > 147 7   0.000 No ( 1.00000 0.00000 ) *
       11) Advertising > 13.5 45  61.830 Yes ( 0.44444 0.55556 )  
         22) Age < 54.5 25  25.020 Yes ( 0.20000 0.80000 )  
           44) CompPrice < 130.5 14  18.250 Yes ( 0.35714 0.64286 )  
             88) Income < 100 9  12.370 No ( 0.55556 0.44444 ) *
             89) Income > 100 5   0.000 Yes ( 0.00000 1.00000 ) *
           45) CompPrice > 130.5 11   0.000 Yes ( 0.00000 1.00000 ) *
         23) Age > 54.5 20  22.490 No ( 0.75000 0.25000 )  
           46) CompPrice < 122.5 10   0.000 No ( 1.00000 0.00000 ) *
           47) CompPrice > 122.5 10  13.860 No ( 0.50000 0.50000 )  
             94) Price < 125 5   0.000 Yes ( 0.00000 1.00000 ) *
             95) Price > 125 5   0.000 No ( 1.00000 0.00000 ) *
    3) ShelveLoc: Good 85  90.330 Yes ( 0.22353 0.77647 )  
      6) Price < 135 68  49.260 Yes ( 0.11765 0.88235 )  
       12) US: No 17  22.070 Yes ( 0.35294 0.64706 )  
         24) Price < 109 8   0.000 Yes ( 0.00000 1.00000 ) *
         25) Price > 109 9  11.460 No ( 0.66667 0.33333 ) *
       13) US: Yes 51  16.880 Yes ( 0.03922 0.96078 ) *
      7) Price > 135 17  22.070 No ( 0.64706 0.35294 )  
       14) Income < 46 6   0.000 No ( 1.00000 0.00000 ) *
       15) Income > 46 11  15.160 Yes ( 0.45455 0.54545 ) *
To properly evaluate the tree, we split the observations into a training set and a test set, fit on the training data, and compute the test error:

set.seed(2)
train = sample(1:nrow(Carseats), 200)
Carseats.test = Carseats[-train, ]
High.test = High[-train]
tree.carseats = tree(High ~ . - Sales, Carseats, subset = train)
tree.pred = predict(tree.carseats, Carseats.test, type = "class")
table(tree.pred, High.test)
         High.test
tree.pred No Yes
      No  86  27
      Yes 30  57
(86+57)/200
[1] 0.715
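
This tree correctly classifies 71.5% of the test observations. Equivalently (a one-line sketch, not in the original), the accuracy can be computed without the confusion matrix:

mean(tree.pred == High.test)  # proportion of correct predictions, 0.715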
Next, cv.tree() performs cross-validation to choose the tree complexity; FUN = prune.misclass makes the classification error rate, rather than the deviance, guide the pruning:

set.seed(3)
cv.carseats = cv.tree(tree.carseats, FUN = prune.misclass)
names(cv.carseats)
[1] "size"   "dev"    "k"      "method"
cv.carseats
$size
[1] 19 17 14 13  9  7  3  2  1

$dev
[1] 55 55 53 52 50 56 69 65 80

$k
[1]       -Inf  0.0000000  0.6666667  1.0000000  1.7500000
[6]  2.0000000  4.2500000  5.0000000 23.0000000

$method
[1] "misclass"

attr(,"class")
[1] "prune"         "tree.sequence"
Despite its name, dev here is the cross-validation error count: the tree with 9 terminal nodes achieves the lowest CV error (50 errors). We plot the error as a function of size and of the cost-complexity parameter k:

par(mfrow = c(1, 2))
plot(cv.carseats$size, cv.carseats$dev, type = "b")
plot(cv.carseats$k, cv.carseats$dev, type = "b")
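
Rather than reading the best size off the plots, it can be extracted programmatically (a small sketch):

best.size = cv.carseats$size[which.min(cv.carseats$dev)]
best.size  # 9, matching the $dev output above (minimum of 50 at size 9)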

We now apply prune.misclass() to obtain the nine-node tree selected by cross-validation:

prune.carseats = prune.misclass(tree.carseats, best = 9)
plot(prune.carseats)
text(prune.carseats, pretty = 0)

We run the prediction again to check how the pruned model performs on the test set:

tree.pred = predict(prune.carseats, Carseats.test, type = "class")
table(tree.pred, High.test)
         High.test
tree.pred No Yes
      No  94  24
      Yes 22  60
(94+60)/200
[1] 0.77

Pruning has produced both a more interpretable tree and a higher test accuracy (77% versus 71.5%). Let us check what happens if we increase the value of best:

prune.carseats = prune.misclass(tree.carseats, best = 15)
plot(prune.carseats)
text(prune.carseats, pretty = 0)

tree.pred = predict(prune.carseats, Carseats.test, type = "class")
table(tree.pred, High.test)
         High.test
tree.pred No Yes
      No  86  22
      Yes 30  62
(86+62)/200
[1] 0.74

The larger pruned tree classifies a smaller fraction of the test observations correctly: accuracy drops to 74%, compared with 77% for the nine-node tree.

8.3.2 Fitting Regression Trees
We fit a regression tree to the Boston housing data, using half of the observations for training:

library(MASS)
set.seed(1)
train = sample(1:nrow(Boston), nrow(Boston)/2)
tree.boston = tree(medv ~ ., Boston, subset = train)
summary(tree.boston)

Regression tree:
tree(formula = medv ~ ., data = Boston, subset = train)
Variables actually used in tree construction:
[1] "lstat" "rm"    "dis"  
Number of terminal nodes:  8 
Residual mean deviance:  12.65 = 3099 / 245 
Distribution of residuals:
     Min.   1st Qu.    Median      Mean   3rd Qu.      Max. 
-14.10000  -2.04200  -0.05357   0.00000   1.96000  12.60000 
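
For a regression tree the reported deviance is simply the sum of squared errors on the training set. As a sketch (assuming predict() without newdata returns the fitted values for the training observations), this can be verified directly:

sum((Boston$medv[train] - predict(tree.boston))^2)
# roughly 3099, matching "Residual mean deviance: 12.65 = 3099 / 245"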
plot(tree.boston)
text(tree.boston, pretty = 0)

We use cv.tree() to see whether pruning the tree would improve performance:

cv.boston = cv.tree(tree.boston)
plot(cv.boston$size, cv.boston$dev, type = "b")

If we did wish to prune, prune.tree() does so; here we keep five terminal nodes:

prune.boston = prune.tree(tree.boston, best = 5)
plot(prune.boston)
text(prune.boston, pretty = 0)

In keeping with the cross-validation results, we use the unpruned tree to make predictions on the test set:

yhat = predict(tree.boston, newdata = Boston[-train, ])
boston.test = Boston[-train, "medv"]
plot(yhat, boston.test)
abline(0, 1)

mean((yhat - boston.test)^2)
[1] 25.04559
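
Taking the square root puts the error on the scale of the response (median home value, in thousands of dollars): the test RMSE is about 5, so this model's predictions are typically within about $5,000 of the true median value:

sqrt(mean((yhat - boston.test)^2))  # about 5.005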
8.3.3 Bagging and Random Forests
We apply bagging and random forests via the randomForest package. Bagging is a special case of a random forest with m = p, so we set mtry = 13: all 13 predictors are considered at each split of every tree.

library(randomForest)
set.seed(1)
bag.boston = randomForest(medv ~ ., data = Boston, subset = train,
                          mtry = 13, importance = TRUE)
bag.boston

Call:
 randomForest(formula = medv ~ ., data = Boston, mtry = 13, importance = TRUE,      subset = train) 
               Type of random forest: regression
                     Number of trees: 500
No. of variables tried at each split: 13

          Mean of squared residuals: 11.15723
                    % Var explained: 86.49
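
The fitted object also records the out-of-bag error after each tree is added. As a small sketch (using the plot method for randomForest objects), we can check that the OOB MSE stabilizes well before 500 trees:

plot(bag.boston, main = "OOB MSE vs. number of trees")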
How well does this bagged model perform on the test set?

yhat.bag = predict(bag.boston, newdata = Boston[-train, ])
plot(yhat.bag, boston.test)
abline(0, 1)

mean((yhat.bag - boston.test)^2)
[1] 13.16449
We can change the number of trees grown with the ntree argument:

bag.boston = randomForest(medv ~ ., data = Boston, subset = train,
                          mtry = 13, ntree = 25)
yhat.bag = predict(bag.boston, newdata = Boston[-train, ])
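The lab stops short of scoring this smaller ensemble; its test MSE is computed exactly as before:

mean((yhat.bag - boston.test)^2)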

Growing a random forest proceeds in exactly the same way, except with a smaller value of mtry (by default, randomForest() uses p/3 variables for regression trees); here we take mtry = 6:

rf.boston = randomForest(medv ~ ., data = Boston, subset = train,
                         mtry = 6, importance = TRUE)
yhat.rf = predict(rf.boston, newdata = Boston[-train, ])

We can view the importance of each variable with the importance() function. %IncMSE is based on the mean increase in prediction error on the out-of-bag samples when that variable is permuted; IncNodePurity measures the total decrease in node impurity (training RSS, for regression) from splits on that variable, averaged over all trees:

importance(rf.boston)
          %IncMSE IncNodePurity
crim    12.737863    1073.37722
zn       3.236628      45.82059
indus    8.120422    1057.00386
chas     2.657232      64.85290
nox     12.600970    1012.56120
rm      32.050830    6712.16728
age     10.363822     553.96843
dis     13.871202    1346.69822
rad      3.682449      87.34525
tax      9.674698     438.10378
ptratio 12.020855     835.18327
black    6.853937     338.71460
lstat   28.260594    6948.44911

The results indicate that rm and lstat are by far the two most important variables. varImpPlot() shows this graphically:

varImpPlot(rf.boston)

8.3.4 Boosting
We use the gbm package. The option distribution = "gaussian" fits boosted regression trees (for a binary classification problem we would use "bernoulli"); n.trees = 5000 trees are grown, and interaction.depth = 4 limits the depth of each tree:

library(gbm)
set.seed(1)
boost.boston = gbm(medv ~ ., data = Boston[train, ], distribution = "gaussian",
                   n.trees = 5000, interaction.depth = 4)

We display the summary; summary() produces a relative-influence plot and prints the relative-influence statistics:

summary(boost.boston)

We can also produce partial dependence plots for the two most influential variables, rm and lstat; these show the marginal effect of each variable after integrating out the others. Median house value rises with rm and falls with lstat:

par(mfrow = c(1, 2))
plot(boost.boston, i = "rm")
plot(boost.boston, i = "lstat")

We use the boosted model to predict medv on the test set:

yhat.boost = predict(boost.boston, newdata = Boston[-train, ], n.trees = 5000)
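
Before refitting, it is worth computing this first model's test MSE (the ISLR text reports roughly 11.8 under the same seed):

mean((yhat.boost - boston.test)^2)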
Refitting with a larger shrinkage parameter, lambda = 0.2 (the gbm default is 0.001), gives a slightly lower test MSE:

boost.boston = gbm(medv ~ ., data = Boston[train, ], distribution = "gaussian",
                   n.trees = 5000, interaction.depth = 4, shrinkage = 0.2,
                   verbose = FALSE)
yhat.boost = predict(boost.boston, newdata = Boston[-train, ], n.trees = 5000)
mean((yhat.boost - boston.test)^2)
[1] 11.51109
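
As a final sketch (not part of the original lab), predict.gbm accepts a vector of tree counts and returns a matrix of predictions, which makes it easy to trace the test MSE as the ensemble grows and confirm that 5000 trees does not overfit badly at this shrinkage:

ntrees = seq(100, 5000, by = 100)
pred.mat = predict(boost.boston, newdata = Boston[-train, ], n.trees = ntrees)
test.mse = colMeans((pred.mat - boston.test)^2)  # one MSE per ensemble size
plot(ntrees, test.mse, type = "b", xlab = "number of trees", ylab = "test MSE")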