Theory

A Random Forest is a machine-learning algorithm that combines the predictions of many decision trees, each fitted to a bootstrap sample of the data with a random subset of predictors considered at each split, and aggregates them (averaging for regression, majority vote for classification) to obtain a more accurate and stable result.
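
The core idea can be sketched in a few lines (illustrative only, using the built-in mtcars data; the names predicciones, muestra, and arbol_i exist only for this sketch): fit many trees on bootstrap samples of the data and average their predictions. A real random forest additionally restricts each split to a random subset of predictors (mtry), which is what the randomForest package does below.

# Illustrative sketch of bagging, not the model fitted later in this document.
library(rpart)
set.seed(1)
predicciones <- replicate(25, {
  muestra <- mtcars[sample(nrow(mtcars), replace = TRUE), ]   # bootstrap sample
  arbol_i <- rpart(mpg ~ ., data = muestra)                   # one tree per sample
  predict(arbol_i, mtcars)
})
head(rowMeans(predicciones))   # averaged ("bagged") prediction per row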

Example 1. Melbourne

This data set contains the prices of more than 13,000 houses in the city of Melbourne.

Install packages and load libraries

# install.packages("tidyverse")
library(tidyverse)
## ── Attaching core tidyverse packages ──────────────────────── tidyverse 2.0.0 ──
## ✔ dplyr     1.1.4     ✔ readr     2.1.5
## ✔ forcats   1.0.0     ✔ stringr   1.5.1
## ✔ ggplot2   3.5.2     ✔ tibble    3.3.0
## ✔ lubridate 1.9.4     ✔ tidyr     1.3.1
## ✔ purrr     1.1.0     
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ dplyr::filter() masks stats::filter()
## ✖ dplyr::lag()    masks stats::lag()
## ℹ Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors
# install.packages("rpart")
library(rpart)
# install.packages("rpart.plot")
library(rpart.plot)
# install.packages("randomForest")
library(randomForest)
## randomForest 4.7-1.2
## Type rfNews() to see new features/changes/bug fixes.
## 
## Attaching package: 'randomForest'
## 
## The following object is masked from 'package:dplyr':
## 
##     combine
## 
## The following object is masked from 'package:ggplot2':
## 
##     margin
# install.packages("modelr") #Calcular erroes
library(modelr)
# install.packages("caret")
library(caret)
## Loading required package: lattice
## 
## Attaching package: 'caret'
## 
## The following object is masked from 'package:purrr':
## 
##     lift

Import the data set

df <- read.csv("C:\\Users\\Adrián\\Downloads\\melbourne.csv")

Understand the data set

summary(df)
##     Suburb            Address              Rooms            Type          
##  Length:13580       Length:13580       Min.   : 1.000   Length:13580      
##  Class :character   Class :character   1st Qu.: 2.000   Class :character  
##  Mode  :character   Mode  :character   Median : 3.000   Mode  :character  
##                                        Mean   : 2.938                     
##                                        3rd Qu.: 3.000                     
##                                        Max.   :10.000                     
##                                                                           
##      Price            Method            SellerG              Date          
##  Min.   :  85000   Length:13580       Length:13580       Length:13580      
##  1st Qu.: 650000   Class :character   Class :character   Class :character  
##  Median : 903000   Mode  :character   Mode  :character   Mode  :character  
##  Mean   :1075684                                                           
##  3rd Qu.:1330000                                                           
##  Max.   :9000000                                                           
##                                                                            
##     Distance        Postcode       Bedroom2         Bathroom    
##  Min.   : 0.00   Min.   :3000   Min.   : 0.000   Min.   :0.000  
##  1st Qu.: 6.10   1st Qu.:3044   1st Qu.: 2.000   1st Qu.:1.000  
##  Median : 9.20   Median :3084   Median : 3.000   Median :1.000  
##  Mean   :10.14   Mean   :3105   Mean   : 2.915   Mean   :1.534  
##  3rd Qu.:13.00   3rd Qu.:3148   3rd Qu.: 3.000   3rd Qu.:2.000  
##  Max.   :48.10   Max.   :3977   Max.   :20.000   Max.   :8.000  
##                                                                 
##       Car           Landsize         BuildingArea     YearBuilt   
##  Min.   : 0.00   Min.   :     0.0   Min.   :    0   Min.   :1196  
##  1st Qu.: 1.00   1st Qu.:   177.0   1st Qu.:   93   1st Qu.:1940  
##  Median : 2.00   Median :   440.0   Median :  126   Median :1970  
##  Mean   : 1.61   Mean   :   558.4   Mean   :  152   Mean   :1965  
##  3rd Qu.: 2.00   3rd Qu.:   651.0   3rd Qu.:  174   3rd Qu.:1999  
##  Max.   :10.00   Max.   :433014.0   Max.   :44515   Max.   :2018  
##  NA's   :62                         NA's   :6450    NA's   :5375  
##  CouncilArea          Lattitude        Longtitude     Regionname       
##  Length:13580       Min.   :-38.18   Min.   :144.4   Length:13580      
##  Class :character   1st Qu.:-37.86   1st Qu.:144.9   Class :character  
##  Mode  :character   Median :-37.80   Median :145.0   Mode  :character  
##                     Mean   :-37.81   Mean   :145.0                     
##                     3rd Qu.:-37.76   3rd Qu.:145.1                     
##                     Max.   :-37.41   Max.   :145.5                     
##                                                                        
##  Propertycount  
##  Min.   :  249  
##  1st Qu.: 4380  
##  Median : 6555  
##  Mean   : 7454  
##  3rd Qu.:10331  
##  Max.   :21650  
## 
str(df)
## 'data.frame':    13580 obs. of  21 variables:
##  $ Suburb       : chr  "Abbotsford" "Abbotsford" "Abbotsford" "Abbotsford" ...
##  $ Address      : chr  "85 Turner St" "25 Bloomburg St" "5 Charles St" "40 Federation La" ...
##  $ Rooms        : int  2 2 3 3 4 2 3 2 1 2 ...
##  $ Type         : chr  "h" "h" "h" "h" ...
##  $ Price        : num  1480000 1035000 1465000 850000 1600000 ...
##  $ Method       : chr  "S" "S" "SP" "PI" ...
##  $ SellerG      : chr  "Biggin" "Biggin" "Biggin" "Biggin" ...
##  $ Date         : chr  "3/12/2016" "4/02/2016" "4/03/2017" "4/03/2017" ...
##  $ Distance     : num  2.5 2.5 2.5 2.5 2.5 2.5 2.5 2.5 2.5 2.5 ...
##  $ Postcode     : num  3067 3067 3067 3067 3067 ...
##  $ Bedroom2     : num  2 2 3 3 3 2 4 2 1 3 ...
##  $ Bathroom     : num  1 1 2 2 1 1 2 1 1 1 ...
##  $ Car          : num  1 0 0 1 2 0 0 2 1 2 ...
##  $ Landsize     : num  202 156 134 94 120 181 245 256 0 220 ...
##  $ BuildingArea : num  NA 79 150 NA 142 NA 210 107 NA 75 ...
##  $ YearBuilt    : num  NA 1900 1900 NA 2014 ...
##  $ CouncilArea  : chr  "Yarra" "Yarra" "Yarra" "Yarra" ...
##  $ Lattitude    : num  -37.8 -37.8 -37.8 -37.8 -37.8 ...
##  $ Longtitude   : num  145 145 145 145 145 ...
##  $ Regionname   : chr  "Northern Metropolitan" "Northern Metropolitan" "Northern Metropolitan" "Northern Metropolitan" ...
##  $ Propertycount: num  4019 4019 4019 4019 4019 ...
df <- na.omit(df)
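
The summary above shows that Car, BuildingArea, and YearBuilt contain missing values, and na.omit() drops every row with at least one NA. An optional check (to be run before na.omit()) shows how many missing values each column has:

colSums(is.na(df))   # number of NA values per column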

Decision Tree

arbol <- rpart(Price ~ Rooms + Distance + Bedroom2 + Bathroom + Car + Landsize 
               + BuildingArea + YearBuilt + Propertycount, data = df)
plot(arbol,uniform=TRUE)
text(arbol, cex=.5)

predict(arbol,head(df))
##       2       3       5       7       8      10 
## 1095996 1562641 1070605 2422140 1095996 1095996
head(df$Price)   
## [1] 1035000 1465000 1600000 1876000 1636000 1097000
prueba_arbol <- head(df)

mae_arbol <- mae(arbol,prueba_arbol)
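
The base plot()/text() diagram is dense; rpart.plot (loaded above, and used again in the exercise below) produces a more readable tree, for example:

rpart.plot(arbol, type = 2, extra = 101)   # clearer diagram with node means and counts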

Random Forest

set.seed(123)
renglones_entrenamiento <- createDataPartition(df$Price, p=0.8, list=FALSE)
entrenamiento <- df[renglones_entrenamiento,]
prueba <- df[-renglones_entrenamiento, ]

rf <- randomForest(Price ~ Rooms + Distance + Bedroom2 + Bathroom + Car + Landsize
                   + BuildingArea + YearBuilt + Propertycount,
                   data = entrenamiento, ntree = 500, mtry = 3, importance = TRUE)

resultado_entrenamiento <- predict(rf,entrenamiento)   
resultado_prueba <- predict(rf,prueba)

mae_rf <- mae(rf,prueba)

resultados <- tibble(Modelo = c("Árbol de Decisión","Bosque Aleatorio"), MAE =c(mae_arbol,mae_rf))
resultados
## # A tibble: 2 × 2
##   Modelo                MAE
##   <chr>               <dbl>
## 1 Árbol de Decisión 295863.
## 2 Bosque Aleatorio  213019.
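
Because the forest was trained with importance=TRUE, the contribution of each predictor can also be inspected as an optional step, using two functions from the randomForest package:

importance(rf)    # %IncMSE and IncNodePurity for each predictor
varImpPlot(rf)    # the same information as a plot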

Exercise 1. Automotive Performance

Import the data set

df2 <- mtcars

Understand the data set

summary(df2)
##       mpg             cyl             disp             hp       
##  Min.   :10.40   Min.   :4.000   Min.   : 71.1   Min.   : 52.0  
##  1st Qu.:15.43   1st Qu.:4.000   1st Qu.:120.8   1st Qu.: 96.5  
##  Median :19.20   Median :6.000   Median :196.3   Median :123.0  
##  Mean   :20.09   Mean   :6.188   Mean   :230.7   Mean   :146.7  
##  3rd Qu.:22.80   3rd Qu.:8.000   3rd Qu.:326.0   3rd Qu.:180.0  
##  Max.   :33.90   Max.   :8.000   Max.   :472.0   Max.   :335.0  
##       drat             wt             qsec             vs        
##  Min.   :2.760   Min.   :1.513   Min.   :14.50   Min.   :0.0000  
##  1st Qu.:3.080   1st Qu.:2.581   1st Qu.:16.89   1st Qu.:0.0000  
##  Median :3.695   Median :3.325   Median :17.71   Median :0.0000  
##  Mean   :3.597   Mean   :3.217   Mean   :17.85   Mean   :0.4375  
##  3rd Qu.:3.920   3rd Qu.:3.610   3rd Qu.:18.90   3rd Qu.:1.0000  
##  Max.   :4.930   Max.   :5.424   Max.   :22.90   Max.   :1.0000  
##        am              gear            carb      
##  Min.   :0.0000   Min.   :3.000   Min.   :1.000  
##  1st Qu.:0.0000   1st Qu.:3.000   1st Qu.:2.000  
##  Median :0.0000   Median :4.000   Median :2.000  
##  Mean   :0.4062   Mean   :3.688   Mean   :2.812  
##  3rd Qu.:1.0000   3rd Qu.:4.000   3rd Qu.:4.000  
##  Max.   :1.0000   Max.   :5.000   Max.   :8.000
str(df2)
## 'data.frame':    32 obs. of  11 variables:
##  $ mpg : num  21 21 22.8 21.4 18.7 18.1 14.3 24.4 22.8 19.2 ...
##  $ cyl : num  6 6 4 6 8 6 8 4 4 6 ...
##  $ disp: num  160 160 108 258 360 ...
##  $ hp  : num  110 110 93 110 175 105 245 62 95 123 ...
##  $ drat: num  3.9 3.9 3.85 3.08 3.15 2.76 3.21 3.69 3.92 3.92 ...
##  $ wt  : num  2.62 2.88 2.32 3.21 3.44 ...
##  $ qsec: num  16.5 17 18.6 19.4 17 ...
##  $ vs  : num  0 0 1 1 0 1 0 1 1 1 ...
##  $ am  : num  1 1 1 0 0 0 0 0 0 0 ...
##  $ gear: num  4 4 4 3 3 3 3 4 4 4 ...
##  $ carb: num  4 4 1 1 2 1 4 2 2 4 ...

Decision Tree

arbol <- rpart(mpg ~ cyl + disp + hp + drat + wt + qsec + gear + carb, data = df2)
plot(arbol,uniform=TRUE)
text(arbol, cex=.5)

predict(arbol,head(df2))
##         Mazda RX4     Mazda RX4 Wag        Datsun 710    Hornet 4 Drive 
##          18.26429          18.26429          26.66364          18.26429 
## Hornet Sportabout           Valiant 
##          18.26429          18.26429
head(df2$mpg)   
## [1] 21.0 21.0 22.8 21.4 18.7 18.1
prueba_arbol <- head(df2)

mae_arbol <- mae(arbol,prueba_arbol)
 
rpart.plot(arbol, type=2, extra=101)

Random Forest

set.seed(123)
renglones_entrenamiento <- createDataPartition(df2$mpg, p=0.8, list=FALSE)
entrenamiento <- df2[renglones_entrenamiento,]
prueba <- df2[-renglones_entrenamiento, ]

rf <- randomForest(mpg ~ cyl + disp + hp + drat + wt + qsec + gear + carb,
                   data = entrenamiento, ntree = 500, mtry = 3, importance = TRUE)

resultado_entrenamiento <- predict(rf,entrenamiento)   
resultado_prueba <- predict(rf,prueba)

mae_rf <- mae(rf,prueba)

resultados <- tibble(Modelo = c("Árbol de Decisión","Bosque Aleatorio"), MAE =c(mae_arbol,mae_rf))
resultados
## # A tibble: 2 × 2
##   Modelo              MAE
##   <chr>             <dbl>
## 1 Árbol de Decisión  2.18
## 2 Bosque Aleatorio   1.75
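
Here mtry=3 was set by hand. As an optional sketch, randomForest::tuneRF can search over mtry using the out-of-bag error; the predictor columns listed below simply mirror the formula used above:

set.seed(123)
tuneRF(entrenamiento[, c("cyl","disp","hp","drat","wt","qsec","gear","carb")],
       entrenamiento$mpg, ntreeTry = 500, stepFactor = 1.5, improve = 0.01)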

Conclusions

Because they combine the results of many trees fitted over multiple iterations, random forests yield a more robust and accurate predictive model than a single decision tree. This can be seen in both examples, where the random forest achieves a lower MAE than the decision tree.
