Load Packages

library(caret)
library(randomForest)

Example 1: Student Success Data

ss <- read.table('data/student_success_data.csv', header=TRUE, sep=',')
ss <- na.omit(ss)                               # drop rows with missing values
ss <- ss[(ss$G3 <= 20) & (ss$G3 >= 0), ]        # keep only valid final grades (0 to 20)
ss$Passed <- factor(ifelse(ss$G3 < 10, 0, 1))   # Passed = 1 if the final grade is at least 10
ss$G1 <- NULL                                   # drop the grade columns (G3 defines the target)
ss$G2 <- NULL
ss$G3 <- NULL
ss$absences <- NULL                             # drop absences as well
summary(ss)
 school    sex          age        address famsize   Pstatus      Medu      
 GP :462   F:297   Min.   :15.00   R:155   GT3:393   A: 45   Min.   :1.000  
 MHS:101   M:266   1st Qu.:16.00   U:408   LE3:170   T:518   1st Qu.:2.000  
                   Median :16.00                             Median :3.000  
                   Mean   :16.61                             Mean   :2.874  
                   3rd Qu.:18.00                             3rd Qu.:4.000  
                   Max.   :22.00                             Max.   :4.000  
      Fedu             Mjob           Fjob            reason      guardian  
 Min.   :1.000   at_home : 66   at_home : 31   course    :215   father:129  
 1st Qu.:2.000   health  : 39   health  : 25   home      :158   mother:398  
 Median :3.000   other   :184   other   :275   other     : 48   other : 36  
 Mean   :2.686   services:182   services:173   reputation:142               
 3rd Qu.:4.000   teacher : 92   teacher : 59                                
 Max.   :4.000                                                              
   traveltime      studytime        failures      schoolsup famsup     paid     activities
 Min.   :1.000   Min.   :1.000   Min.   :0.0000   no :507   no :254   no :332   no :262   
 1st Qu.:1.000   1st Qu.:1.000   1st Qu.:0.0000   yes: 56   yes:309   yes:231   yes:301   
 Median :1.000   Median :2.000   Median :0.0000                                           
 Mean   :1.481   Mean   :1.986   Mean   :0.2842                                           
 3rd Qu.:2.000   3rd Qu.:2.000   3rd Qu.:0.0000                                           
 Max.   :4.000   Max.   :4.000   Max.   :3.0000                                           
 nursery   higher    internet  romantic      famrel         freetime         goout      
 no :102   no : 22   no : 93   no :341   Min.   :1.000   Min.   :1.000   Min.   :1.000  
 yes:461   yes:541   yes:470   yes:222   1st Qu.:4.000   1st Qu.:3.000   1st Qu.:2.000  
                                         Median :4.000   Median :3.000   Median :3.000  
                                         Mean   :3.938   Mean   :3.213   Mean   :3.021  
                                         3rd Qu.:5.000   3rd Qu.:4.000   3rd Qu.:4.000  
                                         Max.   :5.000   Max.   :5.000   Max.   :5.000  
      Dalc            Walc           health     Passed 
 Min.   :1.000   Min.   :1.000   Min.   :1.00   0:200  
 1st Qu.:1.000   1st Qu.:1.000   1st Qu.:3.00   1:363  
 Median :1.000   Median :2.000   Median :4.00          
 Mean   :1.401   Mean   :2.176   Mean   :3.67          
 3rd Qu.:2.000   3rd Qu.:3.000   3rd Qu.:5.00          
 Max.   :5.000   Max.   :5.000   Max.   :5.00          
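
The summary shows 200 students who did not pass and 363 who did, so always predicting the majority class would already be right about 64% of the time. A quick derived check of that baseline:

mean(ss$Passed == "1")   # majority-class baseline accuracy, about 363/563 = 0.645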

Basic Example of a Random Forest

set.seed(1)
m1 <- randomForest(Passed ~ ., ss, importance=TRUE)
m1

Call:
 randomForest(formula = Passed ~ ., data = ss, importance = TRUE) 
               Type of random forest: classification
                     Number of trees: 500
No. of variables tried at each split: 5

        OOB estimate of  error rate: 16.7%
Confusion matrix:
    0   1 class.error
0 142  58  0.29000000
1  36 327  0.09917355
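
A quick derived check: the reported OOB error rate is just the share of off-diagonal counts in the OOB confusion matrix above.

1 - sum(diag(m1$confusion[, 1:2])) / nrow(ss)   # (58 + 36) / 563, about 0.167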

Tuning on Number of Trees

# Error rates (OOB and per-class) as a function of the number of trees.
plot(m1)
legend("topright", cex=1, legend=colnames(m1$err.rate), lty=c(1,2,3), col=c(1,2,3))

m1_opt_ntrees <- which.min(m1$err.rate[,'OOB'])
m1_opt_err_rate <- min(m1$err.rate[,'OOB'])

cat("Optimal Number of Trees: ", m1_opt_ntrees, "\n",
    "Minimum Error Rate:      ", m1_opt_err_rate, sep="")
Optimal Number of Trees: 124
Minimum Error Rate:      0.1527531
set.seed(1)
m2 <- randomForest(Passed ~ ., ss, ntree=m1_opt_ntrees, importance=TRUE)
m2

Call:
 randomForest(formula = Passed ~ ., data = ss, ntree = m1_opt_ntrees,      importance = TRUE) 
               Type of random forest: classification
                     Number of trees: 124
No. of variables tried at each split: 5

        OOB estimate of  error rate: 15.28%
Confusion matrix:
    0   1 class.error
0 146  54  0.27000000
1  32 331  0.08815427

Training Accuracy

train_predict <- predict(m2, ss)
mean(train_predict == ss$Passed)
[1] 1
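
Perfect training accuracy is typical for a random forest whose trees are grown to full depth, so it says little about generalization. A more honest in-sample check uses the out-of-bag predictions; calling predict() on a randomForest object with no new data returns exactly those, as in this minimal sketch:

oob_predict <- predict(m2)        # no newdata supplied, so these are the OOB predictions
mean(oob_predict == ss$Passed)    # roughly 1 minus the reported OOB error rate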

Tuning on mtry and ntree

oob_err_list <- c()
opt_ntree_list <- c()

# For each candidate value of mtry, fit a 500-tree forest and record the best
# OOB error rate along with the number of trees at which it occurs.
for(i in 1:29){
  set.seed(1)
  temp_mod <- randomForest(Passed ~ ., ss, ntree=500, importance=TRUE, mtry=i)
  oob_err_list <- c(oob_err_list, min(temp_mod$err.rate[,'OOB']))
  opt_ntree_list <- c(opt_ntree_list, which.min(temp_mod$err.rate[,'OOB']))
}

opt_mtry <- which.min(oob_err_list)
opt_ntree <- opt_ntree_list[opt_mtry]
min_oob_err <- min(oob_err_list)

cat("Optimal Value of mtry:  ", opt_mtry, "\n",
    "Optimal Value of ntree: ", opt_ntree, "\n",
    "Minimum OOB Error Rate: ", min_oob_err, sep="")
Optimal Value of mtry:  3
Optimal Value of ntree: 445
Minimum OOB Error Rate: 0.1420959
plot(1:29, oob_err_list, xlab="Value of mtry", ylab="Minimum OOB Error Rate")
lines(1:29, oob_err_list)
abline(v=opt_mtry, col="red", lty=2, lwd=1)
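
The same search over mtry could also be run through caret, which supports out-of-bag resampling for random forests. A sketch of that alternative (it tunes mtry using the OOB error of the full 500-tree forest, so its choice may differ slightly from the loop above):

set.seed(1)
rf_grid <- train(Passed ~ ., ss, method="rf", ntree=500,
                 trControl = trainControl(method="oob"),
                 tuneGrid = expand.grid(mtry=1:29))
rf_grid$bestTune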

set.seed(1)
m3 <- randomForest(Passed ~ ., ss, ntree=445, mtry=3, importance=TRUE)
m3

Call:
 randomForest(formula = Passed ~ ., data = ss, ntree = 445, mtry = 3,      importance = TRUE) 
               Type of random forest: classification
                     Number of trees: 445
No. of variables tried at each split: 3

        OOB estimate of  error rate: 14.21%
Confusion matrix:
    0   1 class.error
0 145  55  0.27500000
1  25 338  0.06887052

Estimating Out-Of-Sample Performance

set.seed(1)
train(Passed ~ ., ss, method="rf", ntree=445,
      trControl = trainControl(method="cv", number=20), 
      tuneGrid = expand.grid(mtry=c(3)))
Random Forest 

563 samples
 29 predictor
  2 classes: '0', '1' 

No pre-processing
Resampling: Cross-Validated (20 fold) 
Summary of sample sizes: 535, 535, 535, 535, 535, 534, ... 
Resampling results:

  Accuracy   Kappa    
  0.8349754  0.6226192

Tuning parameter 'mtry' was held constant at a value of 3
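
The 20-fold cross-validated accuracy of about 0.835 is a little lower than the accuracy implied by the OOB estimate found earlier:

1 - min_oob_err   # OOB-based accuracy, about 0.858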
set.seed(1)
randomForest(Passed ~ ., ss, ntree=445, mtry=3, importance=TRUE, 
  replace=FALSE, sampsize=floor(0.8*nrow(ss)))

Call:
 randomForest(formula = Passed ~ ., data = ss, ntree = 445, mtry = 3,      importance = TRUE, replace = FALSE, sampsize = floor(0.8 *          nrow(ss)))
               Type of random forest: classification
                     Number of trees: 445
No. of variables tried at each split: 3

        OOB estimate of  error rate: 14.92%
Confusion matrix:
    0   1 class.error
0 147  53  0.26500000
1  31 332  0.08539945
# Refit the forest with 20 different random seeds to see how much the final
# OOB error rate estimate varies from run to run.
error_rate_list <- c()

for (i in 1:20){
  set.seed(i)
  temp_mod <- randomForest(Passed ~ ., ss, ntree=445, mtry=3, importance=TRUE, 
                           replace=FALSE, sampsize=floor(0.8*nrow(ss)))
  error_rate_list <- c(error_rate_list, temp_mod$err.rate[445,"OOB"])
}

mean(error_rate_list)
[1] 0.1590586
boxplot(error_rate_list)
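
To put numbers on the spread shown in the boxplot:

summary(error_rate_list)
sd(error_rate_list)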

Feature Importance

m3$importance
                       0            1 MeanDecreaseAccuracy MeanDecreaseGini
school      0.0111720672 0.0025577942          0.005694658         3.155759
sex         0.0233632155 0.0012521691          0.009095097         5.437597
age         0.0264599152 0.0032338034          0.011469429        11.435674
address     0.0149627498 0.0023202583          0.006837335         4.639802
famsize     0.0199481289 0.0008092124          0.007596645         4.387064
Pstatus     0.0046869379 0.0001193006          0.001765697         2.380665
Medu        0.0541793952 0.0174901597          0.030577784        14.968563
Fedu        0.0491084012 0.0177485314          0.028881618        15.012602
Mjob        0.0599594814 0.0131151192          0.029743839        17.688261
Fjob        0.0293255539 0.0039668412          0.013065376        10.509067
reason      0.0354658291 0.0025441594          0.014226229        11.883354
guardian    0.0156615270 0.0019149910          0.006817719         6.183318
traveltime  0.0184574985 0.0006176917          0.006975296         6.389089
studytime   0.0255475734 0.0029151586          0.010989474         9.277214
failures    0.0492611771 0.0219989065          0.031765734        16.271369
schoolsup  -0.0004538367 0.0037351486          0.002217851         3.580239
famsup      0.0399601697 0.0120905935          0.021994149         7.981877
paid        0.0226342909 0.0050237302          0.011328281         5.063839
activities  0.0156916157 0.0038482942          0.008056800         5.261459
nursery     0.0085281869 0.0028213463          0.004885437         4.128403
higher      0.0031016391 0.0013766224          0.001956130         2.496847
internet    0.0218552420 0.0046808013          0.010802754         5.095232
romantic    0.0175522927 0.0052956381          0.009702732         4.948335
famrel      0.0382173216 0.0047969335          0.016753294        10.655698
freetime    0.0336506282 0.0042443630          0.014790784        11.351832
goout       0.0454265167 0.0091057532          0.022041033        14.025130
Dalc        0.0188676402 0.0076632578          0.011721844         7.596320
Walc        0.0266031162 0.0072780700          0.014199180        11.637881
health      0.0290641642 0.0062458468          0.014451498        11.363448
varImpPlot(m3)
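
The importance scores indicate which features matter, but not how they influence the prediction. The randomForest package's partialPlot function shows the marginal effect of a single feature; a sketch for failures, one of the top-ranked predictors above (which.class selects the class whose predicted score is plotted):

partialPlot(m3, ss, x.var="failures", which.class="1")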

Example 2: Ames Housing

ames <- read.table('data/AmesHousing.txt', header=TRUE, sep='\t')
sapply(ames, function(x) sum(is.na(x)))
          Order             PID     MS.SubClass       MS.Zoning    Lot.Frontage 
              0               0               0               0             490 
       Lot.Area          Street           Alley       Lot.Shape    Land.Contour 
              0               0            2732               0               0 
      Utilities      Lot.Config      Land.Slope    Neighborhood     Condition.1 
              0               0               0               0               0 
    Condition.2       Bldg.Type     House.Style    Overall.Qual    Overall.Cond 
              0               0               0               0               0 
     Year.Built  Year.Remod.Add      Roof.Style       Roof.Matl    Exterior.1st 
              0               0               0               0               0 
   Exterior.2nd    Mas.Vnr.Type    Mas.Vnr.Area      Exter.Qual      Exter.Cond 
              0               0              23               0               0 
     Foundation       Bsmt.Qual       Bsmt.Cond   Bsmt.Exposure  BsmtFin.Type.1 
              0              79              79              79              79 
   BsmtFin.SF.1  BsmtFin.Type.2    BsmtFin.SF.2     Bsmt.Unf.SF   Total.Bsmt.SF 
              1              79               1               1               1 
        Heating      Heating.QC     Central.Air      Electrical     X1st.Flr.SF 
              0               0               0               0               0 
    X2nd.Flr.SF Low.Qual.Fin.SF     Gr.Liv.Area  Bsmt.Full.Bath  Bsmt.Half.Bath 
              0               0               0               2               2 
      Full.Bath       Half.Bath   Bedroom.AbvGr   Kitchen.AbvGr    Kitchen.Qual 
              0               0               0               0               0 
  TotRms.AbvGrd      Functional      Fireplaces    Fireplace.Qu     Garage.Type 
              0               0               0            1422             157 
  Garage.Yr.Blt   Garage.Finish     Garage.Cars     Garage.Area     Garage.Qual 
            159             157               1               1             158 
    Garage.Cond     Paved.Drive    Wood.Deck.SF   Open.Porch.SF  Enclosed.Porch 
            158               0               0               0               0 
    X3Ssn.Porch    Screen.Porch       Pool.Area         Pool.QC           Fence 
              0               0               0            2917            2358 
   Misc.Feature        Misc.Val         Mo.Sold         Yr.Sold       Sale.Type 
           2824               0               0               0               0 
 Sale.Condition       SalePrice 
              0               0 
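
The high-NA columns dropped in the next chunk could also be identified programmatically rather than by reading the counts above; a sketch with a hypothetical cutoff of 400 missing values:

na_counts <- sapply(ames, function(x) sum(is.na(x)))
names(na_counts[na_counts > 400])   # Lot.Frontage, Alley, Fireplace.Qu, Pool.QC, Fence, Misc.Feature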
# Drop the ID columns (Order, PID) and the high-NA columns, then remove any
# remaining rows with missing values.
ames$Order <- NULL
ames$PID <- NULL
ames$Lot.Frontage <- NULL
ames$Alley <- NULL
ames$Misc.Feature <- NULL
ames$Pool.QC <- NULL
ames$Fireplace.Qu <- NULL
ames$Fence <- NULL
ames <- na.omit(ames)
nrow(ames)
[1] 2683

Elasticnet Model

set.seed(1)
en_mod <- train(SalePrice ~ ., ames, method="glmnet", metric="Rsquared", 
                trControl = trainControl(method="cv", number=10),
                tuneGrid = expand.grid(alpha=seq(0, 1, by=0.2), 
                                        lambda=exp(seq(2,14,length=100))
                                       )
                )
                 

best_ix <- which.max(en_mod$results$Rsquared)
en_mod$results[best_ix, ]
plot(en_mod, pch="", xTrans=log)
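
The coefficients chosen at the best alpha and lambda can be pulled from the underlying glmnet fit; a sketch:

coef(en_mod$finalModel, s = en_mod$bestTune$lambda)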

Random Forest Model

set.seed(1)
m4 <- randomForest(SalePrice ~ ., ames, importance=TRUE)
m4

Call:
 randomForest(formula = SalePrice ~ ., data = ames, importance = TRUE) 
               Type of random forest: regression
                     Number of trees: 500
No. of variables tried at each split: 24

          Mean of squared residuals: 607927012
                    % Var explained: 90.37
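For a regression forest, the mse element stores the OOB mean squared error after each tree, so the OOB root-mean-squared error of the final model, about $24,700 given the value reported above, can be computed directly:

sqrt(tail(m4$mse, 1))   # OOB RMSE in dollars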
plot(m4)

Feature Importance

varImpPlot(m4)
