This is an R Markdown Notebook. When you execute code within the notebook, the results appear beneath the code.

Try executing this chunk by clicking the Run button within the chunk or by placing your cursor inside it and pressing Ctrl+Shift+Enter.

library(xgboost)
library(caret)  
library(e1071)  
setwd("C:/Users/cmhon/Downloads")
Warning: The working directory was changed to C:/Users/cmhon/Downloads inside a notebook chunk. The working directory will be reset when the chunk is finished running. Use the knitr root.dir option in the setup chunk to change the working directory for notebook chunks.
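# A sketch of the alternative the warning suggests: put this in the setup
# chunk so the working directory persists across all notebook chunks:
# knitr::opts_knit$set(root.dir = "C:/Users/cmhon/Downloads")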
library(tigerstats)
library(tidyverse)
library(DT)
library(psych)

# PLS:SEM 
df = read.csv("df.csv")

colnames(df)
 [1] "Type"              "ID"                "X3_card"           "X5_pay.amount"    
 [5] "gender"            "age"               "region"            "smartphone"       
 [9] "online_perception" "on_Simple"         "On_Card"           "Off_Simple"       
[13] "Off_Card"          "WTP"               "bag"               "shoes"            
[17] "tshirt"            "clock"             "convenience"       "pain"             
[21] "adoption"          "form"             
df$ID         = as.character(df$ID)
df$gender     = as.factor(df$gender)
df$age        = as.factor(df$age)
df$region     = as.factor(df$region)
df$smartphone = as.factor(df$smartphone)
#df$on_Simple = as.numeric(df$on_Simple)
df$On_Card    = as.numeric(df$On_Card)
df$Off_Simple = as.numeric(df$Off_Simple)
df$Off_Card   = as.numeric(df$Off_Card)
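The same conversions could be written more compactly with dplyr (already loaded via tidyverse); a sketch assuming the column names above:

df <- df %>%
  mutate(ID = as.character(ID),
         across(c(gender, age, region, smartphone), as.factor),
         across(c(On_Card, Off_Simple, Off_Card), as.numeric))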
data = df
head(data)           # head() returns the top 6 rows of the dataframe

summary(data)       # returns the statistical summary of the data columns
     Type                ID              X3_card          X5_pay.amount      gender  
 Length:2964        Length:2964        Length:2964        Min.   :   10000   F:1444  
 Class :character   Class :character   Class :character   1st Qu.:  300000   M:1520  
 Mode  :character   Mode  :character   Mode  :character   Median :  500000           
                                                          Mean   : 1068121           
                                                          3rd Qu.: 1000000           
                                                          Max.   :10000000           
  age                   region       smartphone   online_perception    on_Simple      
 20s:460   metropolitan    :1704   Apple  : 936   Length:2964        Min.   :      0  
 30s:588   non-metropolitan:1260   other  :  72   Class :character   1st Qu.:  37500  
 40s:632                           Samsung:1956   Mode  :character   Median : 125000  
 50s:712                                                             Mean   : 285043  
 60s:572                                                             3rd Qu.: 300000  
                                                                     Max.   :5000000  
    On_Card           Off_Simple         Off_Card            WTP        
 Min.   :       0   Min.   :      0   Min.   :      0   Min.   :     0  
 1st Qu.:       0   1st Qu.:      0   1st Qu.:  50000   1st Qu.: 18975  
 Median :   75000   Median :  30000   Median : 150000   Median : 28000  
 Mean   :  265372   Mean   : 183301   Mean   : 332731   Mean   : 29844  
 3rd Qu.:  212121   3rd Qu.: 113861   3rd Qu.: 325000   3rd Qu.: 36250  
 Max.   :10000000   Max.   :2500000   Max.   :6300000   Max.   :175775  
      bag             shoes            tshirt           clock         convenience   
 Min.   : -1500   Min.   : -5000   Min.   :     0   Min.   :     0   Min.   :1.000  
 1st Qu.: 21000   1st Qu.: 26000   1st Qu.:  7900   1st Qu.: 10000   1st Qu.:4.000  
 Median : 35500   Median : 39000   Median : 11000   Median : 16000   Median :6.000  
 Mean   : 41529   Mean   : 44322   Mean   : 14019   Mean   : 19507   Mean   :5.237  
 3rd Qu.: 51000   3rd Qu.: 54000   3rd Qu.: 17800   3rd Qu.: 22500   3rd Qu.:7.000  
 Max.   :199500   Max.   :296000   Max.   :201800   Max.   :501000   Max.   :7.000  
      pain          adoption          form     
 Min.   :1.000   Min.   :1.000   Min.   :1.00  
 1st Qu.:3.000   1st Qu.:3.000   1st Qu.:1.75  
 Median :5.000   Median :5.000   Median :2.50  
 Mean   :4.832   Mean   :4.659   Mean   :2.50  
 3rd Qu.:6.000   3rd Qu.:7.000   3rd Qu.:3.25  
 Max.   :7.000   Max.   :7.000   Max.   :4.00  
dim(data)
[1] 2964   22
data = na.omit(data)
str(data)
'data.frame':   2964 obs. of  22 variables:
 $ Type             : chr  "A" "A" "A" "A" ...
 $ ID               : chr  "1" "1" "1" "1" ...
 $ X3_card          : chr  "YES" "YES" "YES" "YES" ...
 $ X5_pay.amount    : int  300000 300000 300000 300000 300000 300000 300000 100000 100000 100000 ...
 $ gender           : Factor w/ 2 levels "F","M": 2 2 2 2 2 2 2 1 1 1 ...
 $ age              : Factor w/ 5 levels "20s","30s","40s",..: 3 3 3 3 3 3 3 1 1 1 ...
 $ region           : Factor w/ 2 levels "metropolitan",..: 2 2 2 2 2 2 2 2 2 2 ...
 $ smartphone       : Factor w/ 3 levels "Apple","other",..: 1 1 1 1 1 1 1 1 1 1 ...
 $ online_perception: chr  "offline is 30% more expensive " "offline is 30% more expensive " "offline is 30% more expensive " "offline is 30% more expensive " ...
 $ on_Simple        : num  150000 150000 150000 150000 150000 150000 150000 90000 90000 90000 ...
 $ On_Card          : num  0 0 0 0 0 0 0 0 0 0 ...
 $ Off_Simple       : num  0 0 0 0 0 0 0 0 0 0 ...
 $ Off_Card         : num  150000 150000 150000 150000 150000 150000 150000 10000 10000 10000 ...
 $ WTP              : num  35000 35000 34750 34750 35250 ...
 $ bag              : num  34000 34000 33000 34000 33000 32000 34000 14000 12000 19000 ...
 $ shoes            : num  73000 72000 73000 72000 73000 73000 72000 18000 16000 16000 ...
 $ tshirt           : num  2000 4000 3000 3000 4000 3000 4000 6000 8000 4000 ...
 $ clock            : num  31000 30000 30000 30000 31000 29000 30000 8000 9000 9000 ...
 $ convenience      : int  6 6 6 6 6 6 6 6 6 6 ...
 $ pain             : int  6 6 6 6 6 6 6 6 6 6 ...
 $ adoption         : int  6 6 6 6 6 6 6 7 7 7 ...
 $ form             : int  1 1 1 1 1 1 1 1 1 1 ...
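Since psych is already loaded, describe() gives a richer profile of the numeric columns than summary(); a sketch:

describe(data %>% select(where(is.numeric)))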
data <- data %>%
  mutate(htp = on_Simple / X5_pay.amount)

quantile(data$htp, probs = seq(.1, .9, by = .1))
      10%       20%       30%       40%       50%       60%       70%       80% 
0.0000000 0.1000000 0.1274510 0.2500000 0.3000000 0.3000000 0.4545455 0.5000000 
      90% 
0.5600000 
data <- data %>%
  mutate(htp_y = case_when(
    htp <= .1            ~ 0.1,
    htp > .1 & htp <= .3 ~ 0.3,
    htp > .3 & htp <= .5 ~ 0.5,
    htp > .5             ~ 0.7,
    TRUE                 ~ 0
  ))
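A quick sanity check that every observation fell into one of the four bins (the counts depend on the data, so output is omitted here):

table(data$htp_y)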
  
# use the createDataPartition() function from the caret package to split the original dataset into a training set (70%) and a testing set (30%)
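# createDataPartition() samples at random; for a reproducible split you could
# set a seed first, e.g. (42 is an arbitrary choice):
# set.seed(42)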
parts = createDataPartition(data$X3_card, p = 0.7, list = F)
train = data[parts, ]
test = data[-parts, ]

X_train = data.matrix(train[, c(5:8, 19:22)])   # independent variables for train
y_train = train[, 23]                           # dependent variable (htp) for train

X_test = data.matrix(test[, c(5:8, 19:22)])     # independent variables for test
y_test = test[, 23]                             # dependent variable (htp) for test

# convert the train and test data into xgboost matrix type.
xgboost_train = xgb.DMatrix(data=X_train, label=y_train)
xgboost_test = xgb.DMatrix(data=X_test, label=y_test)

#Step 4 - Create an xgboost model

# train a model using our training data (xgboost defaults to the
# squared-error regression objective, which suits the continuous htp label)
model <- xgboost(data = xgboost_train,   # the training DMatrix
                 max.depth = 3,          # maximum tree depth
                 nrounds = 50)           # number of boosting iterations
[1] train-rmse:0.250295 
[2] train-rmse:0.224256 
[3] train-rmse:0.209770 
[4] train-rmse:0.201843 
[5] train-rmse:0.194773 
[6] train-rmse:0.190535 
[7] train-rmse:0.187136 
[8] train-rmse:0.185081 
[9] train-rmse:0.183017 
[10]    train-rmse:0.181942 
[11]    train-rmse:0.179818 
[12]    train-rmse:0.178274 
[13]    train-rmse:0.177495 
[14]    train-rmse:0.176663 
[15]    train-rmse:0.175846 
[16]    train-rmse:0.175016 
[17]    train-rmse:0.173712 
[18]    train-rmse:0.172527 
[19]    train-rmse:0.170960 
[20]    train-rmse:0.169812 
[21]    train-rmse:0.169115 
[22]    train-rmse:0.168392 
[23]    train-rmse:0.167760 
[24]    train-rmse:0.167116 
[25]    train-rmse:0.166189 
[26]    train-rmse:0.165830 
[27]    train-rmse:0.165181 
[28]    train-rmse:0.164877 
[29]    train-rmse:0.164485 
[30]    train-rmse:0.163625 
[31]    train-rmse:0.162820 
[32]    train-rmse:0.162391 
[33]    train-rmse:0.162145 
[34]    train-rmse:0.161212 
[35]    train-rmse:0.160740 
[36]    train-rmse:0.159992 
[37]    train-rmse:0.159201 
[38]    train-rmse:0.158547 
[39]    train-rmse:0.158055 
[40]    train-rmse:0.157647 
[41]    train-rmse:0.157385 
[42]    train-rmse:0.157015 
[43]    train-rmse:0.156774 
[44]    train-rmse:0.156581 
[45]    train-rmse:0.155936 
[46]    train-rmse:0.154990 
[47]    train-rmse:0.154560 
[48]    train-rmse:0.154190 
[49]    train-rmse:0.153761 
[50]    train-rmse:0.153397 
summary(model)
               Length Class              Mode       
handle             1  xgb.Booster.handle externalptr
raw            62329  -none-             raw        
niter              1  -none-             numeric    
evaluation_log     2  data.table         list       
call              14  -none-             call       
params             2  -none-             list       
callbacks          2  -none-             list       
feature_names      8  -none-             character  
nfeatures          1  -none-             numeric    
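The 50-line training log above is xgboost's default per-round output; an equivalent but quieter call, a sketch with the same data and parameters, passes verbose = 0:

model <- xgboost(data = xgboost_train, max.depth = 3,
                 nrounds = 50, verbose = 0)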
#use model to make predictions on test data
pred_test = predict(model, xgboost_test)

pred_test
  [1]  0.49562582  0.34232670  0.30760074  0.30760074  0.47551623  0.64210802  0.64210802
  [8]  0.64210802  0.20175675  0.64210802  0.64210802  0.64210802  0.21589974  0.35249218
 [15]  0.20475788  0.38703600  0.37628239  0.28732610  0.19198801  0.19784895  0.45655271
 [22]  0.30760074  0.41801429  0.20687917  0.31498781  0.51050818  0.46573764  0.10759953
 [29]  0.46573764  0.42691743  0.42691743  0.42691743  0.43340710  0.43340710  0.34736627
 [36]  0.34394568  0.34394568  0.34394568  0.34394568  0.14561523  0.14561523  0.45529976
 [43]  0.45529976  0.39527428  0.46537521  0.50237471  0.20411994  0.35562193  0.35562193
 [50]  0.35562193  0.35562193  0.35562193  0.35562193  0.40162998  0.40188736  0.33581066
 [57]  0.33581066  0.43571210  0.43571210  0.43571210  0.43571210  0.31092012  0.31092012
 [64]  0.15577741  0.44665274  0.51394439  0.23632564  0.23632564  0.23632564  0.23632564
 [71]  0.44812590  0.41410708  0.25150108  0.25150108  0.25150108  0.44270265  0.35592285
 [78]  0.35592285  0.60760623  0.61399084  0.61399084  0.61399084  0.61399084  0.33988720
 [85]  0.32242849  0.45775384  0.35592285  0.30938613  0.41446561  0.26407430  0.13577156
 [92]  0.42507175  0.36516449  0.36516449  0.30760074  0.30760074  0.30760074  0.30760074
 [99]  0.14157058  0.14157058  0.14157058  0.14157058  0.18862273  0.18862273  0.14846650
[106]  0.14846650  0.14846650  0.18862273  0.18862273  0.18862273  0.18862273  0.38395065
[113]  0.23266022  0.32702810  0.32702810  0.32386610  0.21589974  0.21589974  0.21589974
[120]  0.28585944  0.11885192  0.11885192  0.11885192  0.09480507  0.09480507  0.46779019
[127]  0.31134245  0.45726413  0.45726413  0.05516073  0.15355137  0.16073765  0.16073765
[134]  0.29464060  0.27253437  0.27253437  0.27253437  0.29096037  0.36851513  0.03805347
[141]  0.03805347  0.45856383  0.53985977  0.25433466  0.06188552  0.24144529  0.20751922
[148]  0.41818273  0.41818273  0.32671443  0.32671443  0.22416447  0.02768859  0.02768859
[155]  0.02768859  0.02768859  0.37292251  0.27835542  0.34663838  0.10643186  0.10643186
[162]  0.10643186  0.33999494  0.34418321  0.34418321  0.34418321  0.29464060  0.29464060
[169]  0.17753921  0.17753921  0.30720547 -0.10340773  0.34292164  0.34292164  0.29268587
[176]  0.29268587  0.32356194  0.32356194  0.32356194  0.32356194  0.32356194  0.32356194
[183]  0.32356194  0.25023824  0.56963789  0.56963789  0.26866996  0.24003431  0.24003431
[190]  0.24003431  0.30651307  0.30651307  0.30651307  0.30117783  0.22754295  0.22754295
[197]  0.22754295  0.19118901  0.34882855  0.15409377  0.35584110  0.35584110  0.37491578
[204]  0.61399084  0.61399084  0.24176238  0.21764882  0.43872434  0.14505404  0.14505404
[211]  0.14505404  0.02978949  0.02978949  0.08431564  0.29262277  0.29262277  0.38905174
[218]  0.38905174  0.38905174  0.38905174  0.42026922  0.42026922  0.30235702  0.22097413
[225]  0.12205401  0.12205401  0.17399822  0.61717242  0.61717242  0.61761999  0.61761999
[232]  0.61761999  0.27398288  0.61761999  0.61761999  0.61761999  0.19480835  0.26558110
[239]  0.19451952  0.41295883  0.26435342  0.32837936  0.32837936  0.26435342  0.26435342
[246]  0.32837936  0.24925001  0.20963798  0.20963798  0.13365066  0.13365066  0.29134080
[253]  0.33295015  0.33295015  0.38797778  0.47895822  0.22553048  0.22553048  0.68912876
[260]  0.41499376  0.39954847  0.27543253  0.39954847  0.33152789  0.33152789  0.22097413
[267]  0.50127369  0.50127369  0.28805068  0.23648851  0.33503899  0.33503899  0.13901943
[274]  0.43911955  0.34902355  0.34902355  0.17819393  0.17819393  0.37834024  0.55065745
[281]  0.55065745  0.55065745  0.55065745  0.36970979  0.36970979  0.36970979  0.28695902
[288]  0.28695902  0.39506948  0.53572440  0.38191035  0.43911955  0.43911955  0.39316535
[295]  0.39316535  0.44527993  0.44527993  0.24428052  0.24428052  0.24428052  0.26697093
[302]  0.26697093  0.26697093  0.35750881  0.64410371  0.64410371  0.39375058  0.26623076
[309]  0.13901943  0.55248690  0.55248690  0.48353609  0.20958337  0.20958337  0.20958337
[316]  0.42928597  0.55040950  0.55040950  0.42928597  0.28894442  0.25610211  0.25610211
[323]  0.25610211  0.11335804  0.11335804  0.33530894  0.33530894  0.33503899  0.20107158
[330]  0.20107158 -0.05549856  0.33295015  0.08869098  0.08869098  0.34864193  0.26524264
[337]  0.38271365  0.38271365  0.11906540  0.11906540  0.11906540  0.11906540  0.11906540
[344]  0.11906540  0.11906540  0.19296168  0.19296168  0.29257748  0.20451109  0.26075196
[351]  0.10638709  0.26075196  0.27759597  0.27759597  0.19800223  0.19800223  0.19800223
[358]  0.19800223  0.54940397  0.17737244  0.27315089  0.27315089  0.27315089  0.27315089
[365]  0.15025403  0.15025403  0.15025403  0.13972948  0.11975212  0.19480835  0.19480835
[372]  0.31679219  0.31679219  0.31679219  0.29135856  0.14708424  0.21194796  0.21194796
[379]  0.19480835  0.19480835  0.19480835  0.19480835  0.28232363  0.40382153  0.42492849
[386]  0.42492849  0.39606744  0.39606744  0.19545263  0.26866147  0.10061021  0.26284716
[393]  0.40532196  0.46191397  0.46191397  0.42251179  0.09479325  0.09479325  0.09479325
[400]  0.09479325  0.11760347  0.33932373  0.33932373  0.27632663  0.09752516  0.09752516
[407]  0.24807508  0.34536079  0.33065709  0.33065709  0.38341090  0.38341090  0.38341090
[414]  0.38341090  0.38341090  0.38341090  0.41908512  0.15868144  0.18290506  0.18290506
[421]  0.32473221  0.17621450  0.31852022  0.35420716  0.35420716  0.35420716  0.35420716
[428]  0.35420716  0.42635909  0.42635909  0.42635909  0.42635909  0.46519548  0.46519548
[435]  0.24742728  0.24742728  0.24742728  0.38648632  0.38648632  0.38648632  0.38648632
[442]  0.32494515  0.32494515  0.25818825  0.25818825  0.22183421  0.36074921  0.35419440
[449]  0.28232363  0.40000048  0.40000048  0.40000048  0.26866147  0.43936777  0.17097682
[456]  0.17097682  0.07296924  0.07296924  0.07252661  0.25975823  0.25975823  0.25975823
[463]  0.25975823  0.48333451  0.48333451  0.48333451  0.48333451  0.48333451  0.62933964
[470]  0.37921754  0.37921754  0.71599680  0.41851267  0.41851267  0.66637468  0.66637468
[477]  0.66637468  0.22065853  0.66637468  0.22065853  0.21561220  0.21228148  0.54442954
[484]  0.54442954  0.29771230  0.26977763  0.26977763  0.26977763  0.26977763  0.26977763
[491]  0.44605914  0.44605914  0.35467455  0.23727855  0.34414342  0.31451067  0.21009411
[498]  0.51107609  0.51107609  0.15329936  0.46742508  0.27574435  0.28788379  0.27944165
[505]  0.27944165  0.27944165  0.23276499  0.40663230  0.38478068  0.38478068  0.40663230
[512]  0.38478068  0.33832327  0.49188107  0.17188460  0.66733307  0.66733307  0.66733307
[519]  0.37924612  0.37924612  0.37924612  0.37924612  0.35296255  0.35296255  0.35296255
[526]  0.35296255  0.23239194  0.46783736  0.46783736  0.45933643  0.50037360  0.33256933
[533]  0.33256933  0.11522450  0.11522450  0.11522450  0.42348382  0.28213677  0.28213677
[540]  0.28213677  0.35237974  0.36962998  0.13348678  0.29510269  0.27379653  0.27379653
[547]  0.27379653  0.20024639  0.48435724  0.10413206  0.53894675  0.53894675  0.53894675
[554]  0.53894675  0.30058587  0.31918937  0.31918937  0.32310262  0.32152593  0.34089285
[561]  0.34089285  0.30032814  0.28670874  0.06072695  0.06072695  0.28510141  0.35474956
[568]  0.19084437  0.19084437  0.19084437  0.19084437  0.19084437  0.18399474  0.29962415
[575]  0.25871193  0.25871193  0.17043963  0.27061433  0.28246930  0.28246930  0.40231720
[582]  0.18399474  0.16565591  0.16565591  0.16565591  0.13141419  0.13141419  0.13141419
[589]  0.13141419  0.60347027  0.34893918  0.47167325  0.47167325  0.01942461  0.35959858
[596]  0.16684429  0.28582069  0.16684429  0.34378025  0.16684429  0.16684429  0.16684429
[603]  0.29218605  0.49606678  0.38980836  0.13688324  0.13688324  0.13688324  0.36810336
[610]  0.39171976  0.12634233  0.12634233  0.35959858  0.46148670  0.46148670  0.36722207
[617]  0.12880483  0.16684429  0.16684429  0.16684429  0.16684429  0.16684429  0.16684429
[624]  0.28002286  0.28002286  0.36554015  0.36554015  0.09121583  0.23700006  0.45689741
[631]  0.30840921  0.38239518  0.38239518  0.38239518  0.15650846  0.17676929  0.16684429
[638]  0.11087986  0.11087986  0.11087986  0.29198942  0.29198942  0.29198942  0.04749797
[645]  0.04749797  0.29672232  0.29672232  0.29672232  0.34986126  0.34986126  0.36937299
[652]  0.36937299  0.36937299  0.36937299  0.36937299  0.36937299  0.51058239  0.51058239
[659]  0.51058239  0.17936820  0.36287007  0.43324155  0.41364384  0.29646480  0.29646480
[666]  0.27581954  0.27581954  0.20932880  0.20932880  0.32480201  0.23700006  0.33134106
[673]  0.34374881  0.34374881  0.36502451  0.43573818  0.43573818  0.30863211  0.29766911
[680]  0.29766911  0.29766911  0.29766911  0.23220378  0.23220378  0.10948990  0.16639279
[687]  0.32022968  0.32022968  0.44445503  0.44445503  0.44445503  0.24001507  0.21241847
[694]  0.62469459  0.48886958  0.48886958  0.60914916  0.60914916  0.60914916  0.60914916
[701]  0.60914916  0.60914916  0.27718124  0.43350253  0.31388971  0.28003451  0.28003451
[708]  0.28003451  0.31388971  0.31388971  0.27028918  0.30839357  0.30839357  0.23635405
[715]  0.23635405  0.23635405  0.23635405  0.23635405  0.46078876  0.46078876  0.29305270
[722]  0.18662442  0.29305270  0.18662442  0.29265600  0.42247012  0.26275879  0.27063087
[729]  0.27063087  0.20421600  0.20421600  0.32550704  0.14198416  0.43834126  0.43834126
[736]  0.23906977  0.41688514  0.22830476  0.22830476  0.13620186  0.13620186  0.38084701
[743]  0.42326534  0.38084701  0.38084701  0.42326534  0.62469459  0.22805996  0.58255124
[750]  0.58255124  0.48922512  0.48306322  0.65636390  0.43135113  0.43135113  0.53997517
[757]  0.53997517  0.53997517  0.27382818  0.27382818  0.29063261  0.35043168  0.35043168
[764]  0.29956141  0.29956141  0.26275879  0.16661555  0.15290502  0.52164483  0.49754068
[771]  0.52164483  0.52164483  0.52164483  0.52164483  0.52164483  0.26869833  0.26441982
[778]  0.21834098  0.14689450  0.21398099  0.38635373  0.40625045  0.30499524  0.30499524
[785]  0.21241847  0.15666665  0.15666665  0.15666665  0.15666665  0.15666665  0.15666665
[792]  0.18336067  0.18336067  0.34033900  0.18336067  0.18336067  0.26733646  0.25249553
[799]  0.38293645  0.38293645  0.30867067  0.30867067  0.45520896  0.45520896  0.21571207
[806]  0.10628872  0.35442668  0.35442668  0.11720163  0.11720163  0.11720163  0.13411306
[813]  0.34707791  0.34707791  0.25326529  0.33024609  0.15666665  0.15666665  0.26869833
[820]  0.35495070  0.35495070  0.45061484  0.13543127  0.13543127  0.13543127  0.13543127
[827]  0.28002977  0.39779583  0.40002367  0.34416991  0.46875969  0.48659649  0.48659649
[834]  0.24494399  0.15902768  0.15902768  0.15902768  0.15902768  0.15902768  0.15902768
[841]  0.76887083  0.41143066  0.35945302  0.15400138  0.40757489  0.50589776  0.22135738
[848]  0.23845123  0.21666540  0.21666540  0.35878101  0.35878101  0.39463726  0.15913460
[855]  0.29372454  0.29372454  0.47654316  0.22844955  0.18296440  0.24166054  0.36712465
[862]  0.36712465  0.36712465  0.36712465  0.36712465  0.32095346  0.29861835  0.24549593
[869]  0.28002977  0.43365988  0.31240335  0.39788881  0.26709276  0.43918905  0.43918905
[876]  0.29433542  0.53798854  0.25488222  0.25488222  0.25488222  0.07561367  0.07561367
[883]  0.07561367  0.27713853  0.27713853  0.27713853  0.27713853  0.31658471  0.31658471
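Before binning, the raw predictions can be scored on the continuous htp scale; a minimal sketch in base R:

# root-mean-squared error of the raw test-set predictions
sqrt(mean((pred_test - y_test)^2))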
pred_y = case_when(
  pred_test <= .1                   ~ 0.1,
  pred_test > .1 & pred_test <= .3  ~ 0.3,
  pred_test > .3 & pred_test <= .5  ~ 0.5,
  pred_test > .5                    ~ 0.7,
  TRUE                              ~ 0
)
pred_y
  [1] 0.5 0.5 0.5 0.5 0.5 0.7 0.7 0.7 0.3 0.7 0.7 0.7 0.3 0.5 0.3 0.5 0.5 0.3 0.3 0.3 0.5
 [22] 0.5 0.5 0.3 0.5 0.7 0.5 0.3 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.5
 [43] 0.5 0.5 0.5 0.7 0.3 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5
 [64] 0.3 0.5 0.7 0.3 0.3 0.3 0.3 0.5 0.5 0.3 0.3 0.3 0.5 0.5 0.5 0.7 0.7 0.7 0.7 0.7 0.5
 [85] 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.3 0.3
[106] 0.3 0.3 0.3 0.3 0.3 0.3 0.5 0.3 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.1 0.1 0.5
[127] 0.5 0.5 0.5 0.1 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.5 0.1 0.1 0.5 0.7 0.3 0.1 0.3 0.3
[148] 0.5 0.5 0.5 0.5 0.3 0.1 0.1 0.1 0.1 0.5 0.3 0.5 0.3 0.3 0.3 0.5 0.5 0.5 0.5 0.3 0.3
[169] 0.3 0.3 0.5 0.1 0.5 0.5 0.3 0.3 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.3 0.7 0.7 0.3 0.3 0.3
[190] 0.3 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.5 0.3 0.5 0.5 0.5 0.7 0.7 0.3 0.3 0.5 0.3 0.3
[211] 0.3 0.1 0.1 0.1 0.3 0.3 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.7 0.7 0.7 0.7
[232] 0.7 0.3 0.7 0.7 0.7 0.3 0.3 0.3 0.5 0.3 0.5 0.5 0.3 0.3 0.5 0.3 0.3 0.3 0.3 0.3 0.3
[253] 0.5 0.5 0.5 0.5 0.3 0.3 0.7 0.5 0.5 0.3 0.5 0.5 0.5 0.3 0.7 0.7 0.3 0.3 0.5 0.5 0.3
[274] 0.5 0.5 0.5 0.3 0.3 0.5 0.7 0.7 0.7 0.7 0.5 0.5 0.5 0.3 0.3 0.5 0.7 0.5 0.5 0.5 0.5
[295] 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.3 0.5 0.7 0.7 0.5 0.3 0.3 0.7 0.7 0.5 0.3 0.3 0.3
[316] 0.5 0.7 0.7 0.5 0.3 0.3 0.3 0.3 0.3 0.3 0.5 0.5 0.5 0.3 0.3 0.1 0.5 0.1 0.1 0.5 0.3
[337] 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3
[358] 0.3 0.7 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.5 0.5 0.5 0.3 0.3 0.3 0.3
[379] 0.3 0.3 0.3 0.3 0.3 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.5 0.5 0.5 0.5 0.1 0.1 0.1
[400] 0.1 0.3 0.5 0.5 0.3 0.1 0.1 0.3 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.3
[421] 0.5 0.3 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.5 0.5 0.5 0.5
[442] 0.5 0.5 0.3 0.3 0.3 0.5 0.5 0.3 0.5 0.5 0.5 0.3 0.5 0.3 0.3 0.1 0.1 0.1 0.3 0.3 0.3
[463] 0.3 0.5 0.5 0.5 0.5 0.5 0.7 0.5 0.5 0.7 0.5 0.5 0.7 0.7 0.7 0.3 0.7 0.3 0.3 0.3 0.7
[484] 0.7 0.3 0.3 0.3 0.3 0.3 0.3 0.5 0.5 0.5 0.3 0.5 0.5 0.3 0.7 0.7 0.3 0.5 0.3 0.3 0.3
[505] 0.3 0.3 0.3 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.3 0.7 0.7 0.7 0.5 0.5 0.5 0.5 0.5 0.5 0.5
[526] 0.5 0.3 0.5 0.5 0.5 0.7 0.5 0.5 0.3 0.3 0.3 0.5 0.3 0.3 0.3 0.5 0.5 0.3 0.3 0.3 0.3
[547] 0.3 0.3 0.5 0.3 0.7 0.7 0.7 0.7 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.3 0.1 0.1 0.3 0.5
[568] 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.5 0.3 0.3 0.3 0.3 0.3 0.3 0.3
[589] 0.3 0.7 0.5 0.5 0.5 0.1 0.5 0.3 0.3 0.3 0.5 0.3 0.3 0.3 0.3 0.5 0.5 0.3 0.3 0.3 0.5
[610] 0.5 0.3 0.3 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.5 0.5 0.1 0.3 0.5
[631] 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.1 0.1 0.3 0.3 0.3 0.5 0.5 0.5
[652] 0.5 0.5 0.5 0.5 0.5 0.7 0.7 0.7 0.3 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.3 0.5 0.3 0.5
[673] 0.5 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.5 0.5 0.5 0.5 0.5 0.3 0.3
[694] 0.7 0.5 0.5 0.7 0.7 0.7 0.7 0.7 0.7 0.3 0.5 0.5 0.3 0.3 0.3 0.5 0.5 0.3 0.5 0.5 0.3
[715] 0.3 0.3 0.3 0.3 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.5 0.3 0.3 0.3 0.3 0.3 0.5 0.3 0.5 0.5
[736] 0.3 0.5 0.3 0.3 0.3 0.3 0.5 0.5 0.5 0.5 0.5 0.7 0.3 0.7 0.7 0.5 0.5 0.7 0.5 0.5 0.7
[757] 0.7 0.7 0.3 0.3 0.3 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.7 0.5 0.7 0.7 0.7 0.7 0.7 0.3 0.3
[778] 0.3 0.3 0.3 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.5 0.3 0.3 0.3 0.3
[799] 0.5 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.5 0.5 0.3 0.3 0.3 0.3 0.5 0.5 0.3 0.5 0.3 0.3 0.3
[820] 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.5 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.3 0.3
[841] 0.7 0.5 0.5 0.3 0.5 0.7 0.3 0.3 0.3 0.3 0.5 0.5 0.5 0.3 0.3 0.3 0.5 0.3 0.3 0.3 0.5
[862] 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.5 0.5 0.5 0.3 0.5 0.5 0.3 0.7 0.3 0.3 0.3 0.1 0.1
[883] 0.1 0.3 0.3 0.3 0.3 0.5 0.5
test_y = case_when(
  y_test <= .1                ~ 0.1,
  y_test > .1 & y_test <= .3  ~ 0.3,
  y_test > .3 & y_test <= .5  ~ 0.5,
  y_test > .5                 ~ 0.7,
  TRUE                        ~ 0
)
test_y
  [1] 0.5 0.3 0.3 0.3 0.3 0.7 0.7 0.7 0.1 0.7 0.7 0.7 0.1 0.3 0.3 0.7 0.7 0.3 0.5 0.3 0.5
 [22] 0.5 0.1 0.1 0.5 0.7 0.5 0.1 0.5 0.7 0.7 0.7 0.5 0.5 0.1 0.3 0.3 0.3 0.3 0.1 0.1 0.3
 [43] 0.3 0.3 0.5 0.3 0.3 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.7 0.3 0.3 0.5 0.5 0.5 0.5 0.3 0.3
 [64] 0.1 0.5 0.3 0.3 0.3 0.3 0.3 0.5 0.5 0.5 0.5 0.5 0.5 0.1 0.1 0.5 0.5 0.5 0.5 0.5 0.5
 [85] 0.3 0.5 0.5 0.5 0.7 0.1 0.1 0.7 0.1 0.1 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.1 0.1 0.1
[106] 0.1 0.1 0.1 0.1 0.1 0.1 0.3 0.3 0.3 0.3 0.5 0.3 0.3 0.3 0.1 0.3 0.3 0.3 0.1 0.1 0.7
[127] 0.1 0.5 0.5 0.1 0.1 0.3 0.3 0.1 0.1 0.1 0.1 0.3 0.5 0.1 0.1 0.3 0.7 0.3 0.1 0.5 0.1
[148] 0.5 0.5 0.3 0.3 0.5 0.1 0.1 0.1 0.1 0.7 0.1 0.5 0.3 0.3 0.3 0.5 0.7 0.7 0.7 0.3 0.3
[169] 0.1 0.1 0.7 0.1 0.3 0.3 0.1 0.1 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.7 0.7 0.1 0.3 0.3
[190] 0.3 0.3 0.3 0.3 0.5 0.3 0.3 0.3 0.1 0.5 0.3 0.5 0.5 0.5 0.5 0.5 0.3 0.5 0.5 0.3 0.3
[211] 0.3 0.1 0.1 0.3 0.5 0.5 0.1 0.1 0.1 0.1 0.5 0.5 0.3 0.7 0.3 0.3 0.3 0.3 0.3 0.7 0.7
[232] 0.7 0.1 0.7 0.7 0.7 0.5 0.3 0.3 0.7 0.3 0.7 0.7 0.3 0.3 0.7 0.5 0.3 0.3 0.3 0.3 0.5
[253] 0.5 0.5 0.5 0.1 0.1 0.1 0.3 0.5 0.5 0.3 0.5 0.7 0.7 0.1 0.7 0.7 0.3 0.5 0.3 0.3 0.1
[274] 0.3 0.3 0.3 0.3 0.3 0.7 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.7 0.7 0.7 0.7 0.3 0.5 0.5 0.7
[295] 0.7 0.3 0.3 0.1 0.1 0.1 0.3 0.3 0.3 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.5 0.1 0.1 0.1
[316] 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.5 0.5 0.5 0.5 0.3 0.1 0.1 0.7 0.3 0.1 0.1 0.1 0.3
[337] 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.1 0.1 0.1 0.3 0.1 0.3 0.1 0.3 0.3 0.1 0.1 0.1
[358] 0.1 0.7 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.7 0.3 0.3 0.3 0.3 0.3 0.7 0.5 0.1 0.1
[379] 0.1 0.1 0.3 0.1 0.3 0.3 0.5 0.5 0.3 0.3 0.7 0.3 0.1 0.1 0.5 0.5 0.5 0.5 0.1 0.1 0.1
[400] 0.1 0.3 0.7 0.7 0.1 0.3 0.3 0.3 0.5 0.3 0.3 0.7 0.7 0.7 0.7 0.7 0.7 0.5 0.5 0.1 0.1
[421] 0.7 0.1 0.3 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.7 0.7 0.3 0.3 0.3 0.3 0.3 0.3 0.3
[442] 0.5 0.5 0.3 0.3 0.1 0.3 0.5 0.3 0.5 0.5 0.5 0.3 0.5 0.3 0.3 0.1 0.1 0.5 0.1 0.1 0.1
[463] 0.1 0.5 0.5 0.5 0.5 0.5 0.7 0.3 0.3 0.5 0.3 0.3 0.7 0.7 0.7 0.1 0.7 0.1 0.5 0.3 0.7
[484] 0.7 0.3 0.3 0.3 0.3 0.3 0.3 0.5 0.5 0.5 0.5 0.1 0.1 0.3 0.7 0.7 0.1 0.7 0.3 0.5 0.3
[505] 0.3 0.3 0.1 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.7 0.7 0.7 0.5 0.5 0.5 0.5 0.5 0.5 0.5
[526] 0.5 0.7 0.3 0.3 0.5 0.7 0.3 0.3 0.1 0.1 0.1 0.3 0.3 0.3 0.3 0.5 0.7 0.1 0.3 0.5 0.5
[547] 0.5 0.3 0.3 0.1 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.5 0.5 0.5 0.5 0.3 0.1 0.1 0.1 0.1 0.5
[568] 0.3 0.3 0.3 0.3 0.3 0.1 0.3 0.3 0.3 0.3 0.1 0.1 0.1 0.5 0.3 0.3 0.3 0.3 0.1 0.1 0.1
[589] 0.1 0.7 0.1 0.5 0.5 0.1 0.1 0.3 0.3 0.1 0.3 0.1 0.1 0.1 0.3 0.3 0.5 0.1 0.1 0.1 0.3
[610] 0.3 0.1 0.1 0.1 0.5 0.5 0.3 0.5 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.5 0.5 0.3 0.5 0.3
[631] 0.3 0.7 0.7 0.7 0.5 0.1 0.3 0.1 0.1 0.1 0.7 0.7 0.7 0.1 0.1 0.3 0.3 0.3 0.1 0.1 0.5
[652] 0.5 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.5 0.3 0.7 0.1 0.1 0.1 0.3 0.3 0.3 0.3 0.5 0.1 0.7
[673] 0.3 0.3 0.5 0.5 0.5 0.5 0.3 0.3 0.3 0.3 0.3 0.3 0.1 0.5 0.1 0.1 0.5 0.5 0.5 0.3 0.3
[694] 0.5 0.3 0.3 0.7 0.7 0.7 0.7 0.7 0.7 0.3 0.7 0.3 0.7 0.7 0.7 0.3 0.3 0.5 0.3 0.3 0.3
[715] 0.3 0.3 0.3 0.3 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.1 0.1 0.5 0.5 0.3 0.3 0.5 0.1 0.7 0.7
[736] 0.3 0.1 0.3 0.3 0.1 0.1 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.7 0.7 0.5 0.5 0.7 0.7 0.7 0.5
[757] 0.5 0.5 0.1 0.1 0.3 0.5 0.5 0.1 0.1 0.3 0.5 0.3 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5
[778] 0.3 0.1 0.7 0.1 0.3 0.1 0.1 0.3 0.3 0.3 0.3 0.3 0.3 0.3 0.1 0.1 0.1 0.1 0.1 0.3 0.3
[799] 0.1 0.1 0.3 0.3 0.5 0.5 0.1 0.3 0.3 0.3 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.3
[820] 0.3 0.3 0.5 0.1 0.1 0.1 0.1 0.3 0.3 0.7 0.5 0.5 0.5 0.5 0.5 0.1 0.1 0.1 0.1 0.1 0.1
[841] 0.7 0.5 0.5 0.3 0.5 0.7 0.5 0.3 0.1 0.1 0.3 0.3 0.5 0.5 0.3 0.3 0.7 0.1 0.1 0.3 0.3
[862] 0.3 0.3 0.3 0.3 0.5 0.3 0.1 0.7 0.7 0.3 0.5 0.3 0.5 0.5 0.3 0.5 0.3 0.3 0.3 0.1 0.1
[883] 0.1 0.1 0.1 0.1 0.1 0.5 0.5
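The same cut points are applied three times (htp_y, pred_y, test_y); a small helper, a sketch assuming dplyr's case_when, would avoid the repetition:

bin_htp <- function(x) {
  case_when(
    x <= .1           ~ 0.1,
    x > .1 & x <= .3  ~ 0.3,
    x > .3 & x <= .5  ~ 0.5,
    x > .5            ~ 0.7,
    TRUE              ~ 0
  )
}
# e.g. pred_y = bin_htp(pred_test); test_y = bin_htp(y_test)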
# convert both binned vectors to factors; their values were printed above,
# so only the levels are confirmed here
pred_y = as.factor(pred_y)
levels(pred_y)
[1] "0.1" "0.3" "0.5" "0.7"
test_y = as.factor(test_y)
levels(test_y)
[1] "0.1" "0.3" "0.5" "0.7"
library(caret)

# Coerce the binned vectors to factors for the confusion matrix
expected_value <- factor(test_y)
predicted_value <- factor(pred_y)

#Creating confusion matrix
example <- confusionMatrix(data=predicted_value, reference = expected_value)

#Display results 
example
Confusion Matrix and Statistics

          Reference
Prediction 0.1 0.3 0.5 0.7
       0.1  29   4   1   1
       0.3 163 180  44  15
       0.5  34 120 161  50
       0.7   0  11  31  45

Overall Statistics
                                          
               Accuracy : 0.4668          
                 95% CI : (0.4336, 0.5002)
    No Information Rate : 0.3543          
    P-Value [Acc > NIR] : 3.662e-12       
                                          
                  Kappa : 0.247           
                                          
 Mcnemar's Test P-Value : < 2.2e-16       

Statistics by Class:

                     Class: 0.1 Class: 0.3 Class: 0.5 Class: 0.7
Sensitivity             0.12832     0.5714     0.6793    0.40541
Specificity             0.99095     0.6132     0.6871    0.94602
Pos Pred Value          0.82857     0.4478     0.4411    0.51724
Neg Pred Value          0.76932     0.7228     0.8550    0.91771
Prevalence              0.25422     0.3543     0.2666    0.12486
Detection Rate          0.03262     0.2025     0.1811    0.05062
Detection Prevalence    0.03937     0.4522     0.4106    0.09786
Balanced Accuracy       0.55963     0.5923     0.6832    0.67571
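Individual statistics can also be pulled out of the confusionMatrix object programmatically; a short sketch using caret's standard accessors:

example$overall["Accuracy"]              # overall accuracy
example$byClass[, "Balanced Accuracy"]   # per-class balanced accuracy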
importance = xgb.importance(model = model)
# the model uses 8 features, so plot the full importance matrix
print(xgb.plot.importance(importance_matrix = importance))

# a plot with all the trees
xgb.plot.tree(model = model)

# plotting all the trees at once is too dense to read, so we stick to one tree
# at a time. The code below plots the first tree (trees = 0) and shows its node IDs
xgb.plot.tree(model = model, trees = 0, show_node_id = TRUE)
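If the fitted booster is needed later, it can be saved and reloaded; a sketch (the file name is an arbitrary choice):

xgb.save(model, "htp_xgboost.model")
model2 <- xgb.load("htp_xgboost.model")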