Loading and preprocessing the data

library(rpart)                     # recursive partitioning for decision trees
library(caret)                     # model training utilities and confusionMatrix()
## Loading required package: lattice
## Loading required package: ggplot2
setwd("D:/Users/gkokate/Desktop/Markdown")
build <- read.csv(file = "Build .csv", sep = ",", header = TRUE)
test1 <- read.csv(file = "test.csv", sep = ",", header = TRUE)
# Sample observations 
head(build)
##   gponOntAniOpInfoOpticalSignalLevel gponOntAniOpInfoTxOpticalSignalLevel
## 1                              -7711                                 1142
## 2                              -7703                                 1288
## 3                              -7703                                 1081
## 4                              -7703                                 1207
## 5                              -7688                                 1276
## 6                              -7688                                 1282
##   gponOntOltsideOpInfoRxOpticalSignalLevel X15MinDnFwdByteCounter
## 1                                     -171               8.888281
## 2                                     -170               9.544178
## 3                                     -170               8.915710
## 4                                     -171               7.555582
## 5                                     -170               7.475159
## 6                                     -169               7.236687
##   X15MinUpFwdByteCounter bponOntOpInfoDistance ifOperStatus
## 1               7.245204                    38           up
## 2               7.764763                    38           up
## 3               7.648214                    38           up
## 4               6.976296                    38           up
## 5               6.646812                    38           up
## 6               6.449259                    38           up
# Convert the dependent variable to a factor (categorical)
build$ifOperStatus <- as.factor(build$ifOperStatus)
# Split data into training (70%) and validation (30%) sets
# (no seed is set, so the exact split and the numbers below vary between runs)
split <- sample(nrow(build), floor(nrow(build) * 0.7))
train <- build[split,]
val <- build[-split,]
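
Because "down" is rare, a stratified split keeps the class ratio equal in both partitions. Below is a minimal alternative sketch using caret::createDataPartition (the seed value and object names are illustrative, not part of the original analysis):

set.seed(42)                                 # illustrative seed, for a reproducible split
idx <- createDataPartition(build$ifOperStatus, p = 0.7, list = FALSE)
train_strat <- build[idx, ]                  # ~70%, same "down"/"up" proportions as build
val_strat   <- build[-idx, ]                 # ~30% held out for validation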

Decision Tree Model

# Fit a classification tree; the prior c(0.3, 0.7) lifts the weight of the rare
# "down" class (about 6% of the data) to 30% to counter the class imbalance
mtree <- rpart(ifOperStatus ~ ., data = train, method = "class",
               parms = list(prior = c(0.3, 0.7)))
# alternative, equal priors: parms = list(prior = c(0.5, 0.5))
# Confusion matrix on the validation set
rpartpred <- predict(mtree,val,type="class")
confusionMatrix(rpartpred,val$ifOperStatus)
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction down   up
##       down  215  300
##       up     54 3995
##                                         
##                Accuracy : 0.9224        
##                  95% CI : (0.9143, 0.93)
##     No Information Rate : 0.9411        
##     P-Value [Acc > NIR] : 1             
##                                         
##                   Kappa : 0.5106        
##  Mcnemar's Test P-Value : <2e-16        
##                                         
##             Sensitivity : 0.79926       
##             Specificity : 0.93015       
##          Pos Pred Value : 0.41748       
##          Neg Pred Value : 0.98666       
##              Prevalence : 0.05894       
##          Detection Rate : 0.04711       
##    Detection Prevalence : 0.11284       
##       Balanced Accuracy : 0.86470       
##                                         
##        'Positive' Class : down          
## 
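
The prior is one way to address the imbalance; rpart can also penalize specific mistakes directly through a loss matrix (rows = true class, columns = predicted class, in factor-level order down, up). A sketch with an illustrative, untuned cost of 5 for missing a "down":

# L[1,2] = 5: predicting "up" when the truth is "down" costs 5x the opposite error
mtree_loss <- rpart(ifOperStatus ~ ., data = train, method = "class",
                    parms = list(loss = matrix(c(0, 1, 5, 0), nrow = 2)))
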
# Plot the tree skeleton
plot(mtree)
# Add labels to the decision tree
text(mtree)

library(rattle)
## Rattle: A free graphical interface for data mining with R.
## Version 4.1.0 Copyright (c) 2006-2015 Togaware Pty Ltd.
## Type 'rattle()' to shake, rattle, and roll your data.
library(rpart.plot)
library(RColorBrewer)
# Plot the tree with rpart.plot::prp
prp(mtree, faclen = 0, cex = 0.8, extra = 1)

# Custom node-labelling function: append the observation count to each node
tot_count <- function(x, labs, digits, varlen) {
  paste(labs, "\n\nn =", x$frame$n)
}
# Decision tree with per-node counts
prp(mtree, faclen = 0, cex = 0.8, node.fun = tot_count)
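
Beyond the plots, rpart records an importance score for every predictor (the improvement in node purity attributed to each variable across the splits), which gives a quick numeric ranking:

round(mtree$variable.importance, 2)   # higher = more useful for separating up from down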

printcp(mtree)
## 
## Classification tree:
## rpart(formula = ifOperStatus ~ ., data = train, method = "class", 
##     parms = list(prior = c(0.3, 0.7)))
## 
## Variables actually used in tree construction:
## [1] bponOntOpInfoDistance                   
## [2] gponOntOltsideOpInfoRxOpticalSignalLevel
## [3] X15MinDnFwdByteCounter                  
## [4] X15MinUpFwdByteCounter                  
## 
## Root node error: 3194.4/10648 = 0.3
## 
## n= 10648 
## 
##         CP nsplit rel error  xerror     xstd
## 1 0.620251      0   1.00000 1.00000 0.037639
## 2 0.014403      1   0.37975 0.37975 0.021782
## 3 0.010304      4   0.33654 0.35330 0.017771
## 4 0.010000      5   0.32624 0.34084 0.017437
# Pick the CP with the lowest cross-validated error
bestcp <- mtree$cptable[which.min(mtree$cptable[, "xerror"]), "CP"]
# Prune at the best CP and re-check the classification matrix
pruned <- prune(mtree, cp = bestcp)
prp(pruned, faclen = 0, cex = 0.8, extra = 1)
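
Choosing the minimum xerror, as above, can still favor a slightly overgrown tree; the 1-SE rule is a common, more conservative alternative that takes the simplest tree whose xerror lies within one standard error of the minimum. A sketch on the same cptable (object names are illustrative):

cp_tab <- mtree$cptable
thresh <- min(cp_tab[, "xerror"]) + cp_tab[which.min(cp_tab[, "xerror"]), "xstd"]
bestcp_1se <- cp_tab[which(cp_tab[, "xerror"] <= thresh)[1], "CP"]  # first row = simplest tree
pruned_1se <- prune(mtree, cp = bestcp_1se)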

predictions <- predict(pruned, val, type="class")
confusionMatrix(predictions,val$ifOperStatus)
## Confusion Matrix and Statistics
## 
##           Reference
## Prediction down   up
##       down  215  300
##       up     54 3995
##                                         
##                Accuracy : 0.9224        
##                  95% CI : (0.9143, 0.93)
##     No Information Rate : 0.9411        
##     P-Value [Acc > NIR] : 1             
##                                         
##                   Kappa : 0.5106        
##  Mcnemar's Test P-Value : <2e-16        
##                                         
##             Sensitivity : 0.79926       
##             Specificity : 0.93015       
##          Pos Pred Value : 0.41748       
##          Neg Pred Value : 0.98666       
##              Prevalence : 0.05894       
##          Detection Rate : 0.04711       
##    Detection Prevalence : 0.11284       
##       Balanced Accuracy : 0.86470       
##                                         
##        'Positive' Class : down          
## 
# Scoring: ROC, AUC, and lift with ROCR
library(ROCR)
## Loading required package: gplots
## 
## Attaching package: 'gplots'
## The following object is masked from 'package:stats':
## 
##     lowess
val1 <- predict(pruned, val, type = "prob")
pred_val <- prediction(val1[, 2], val$ifOperStatus)
perf_val <- performance(pred_val,"auc")
perf_val
## An object of class "performance"
## Slot "x.name":
## [1] "None"
## 
## Slot "y.name":
## [1] "Area under the ROC curve"
## 
## Slot "alpha.name":
## [1] "none"
## 
## Slot "x.values":
## list()
## 
## Slot "y.values":
## [[1]]
## [1] 0.8938214
## 
## 
## Slot "alpha.values":
## list()
# Lift chart: lift vs. rate of positive predictions
plot(performance(pred_val, measure = "lift", x.measure = "rpp"), colorize = TRUE)

# Compute true positive and false positive rates
perf_val <- performance(pred_val, "tpr", "fpr")
# Plot the ROC curve
plot(perf_val, col = "green", lwd = 1.5)

# Calculate the KS statistic (maximum separation between TPR and FPR)
ks1.tree <- max(attr(perf_val, "y.values")[[1]] - (attr(perf_val, "x.values")[[1]]))
ks1.tree
## [1] 0.733552
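
The same performance object also stores the probability cutoffs (ROCR's "alpha.values"), so the threshold at which this maximum TPR-FPR separation occurs can be recovered; that cutoff is a natural choice when turning scores into up/down decisions:

tpr <- attr(perf_val, "y.values")[[1]]
fpr <- attr(perf_val, "x.values")[[1]]
ks_cutoff <- attr(perf_val, "alpha.values")[[1]][which.max(tpr - fpr)]
ks_cutoff   # probability threshold that achieves the KS separation above
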
# Cross-validation, method 1: bootstrap assessment with ROSE
library(ROSE)
## Loaded ROSE 0.0-3
ROSE.BOOT <- ROSE.eval(ifOperStatus ~ ., data = train, learner = rpart,
                       method.assess = "BOOT",
                       extr.pred = function(obj) obj[, 2], seed = 1)
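
The returned object collects the bootstrap estimates of the accuracy measure (AUC by default); assuming the acc component documented by the ROSE package, they can be summarised as follows:

summary(ROSE.BOOT$acc)   # spread of the bootstrap AUC estimates (acc per ROSE docs)
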
# Cross-validation, method 2: 10-fold CV with caret
library(caret)
tc <- trainControl("cv", 10)
rpart.grid <- expand.grid(.cp = 0.2)
(train.rpart <- train(ifOperStatus ~ ., data = train, method = "rpart",
                      trControl = tc, tuneGrid = rpart.grid))
## CART 
## 
## 10648 samples
##     6 predictor
##     2 classes: 'down', 'up' 
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 9583, 9583, 9584, 9582, 9584, 9583, ... 
## Resampling results
## 
##   Accuracy   Kappa      Accuracy SD  Kappa SD 
##   0.9536065  0.6241365  0.006786997  0.0476512
## 
## Tuning parameter 'cp' was held constant at a value of 0.2
## 
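
Holding cp constant at 0.2 validates only one tree; the same scaffolding can search a grid of candidate values (the grid below is an illustrative sketch, not a tuned choice):

rpart.grid2 <- expand.grid(.cp = seq(0.001, 0.05, by = 0.005))
train.rpart2 <- train(ifOperStatus ~ ., data = train, method = "rpart",
                      trControl = tc, tuneGrid = rpart.grid2)
train.rpart2$bestTune   # cp with the best cross-validated accuracy
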
# Model performance on test data
ptest <- predict(mtree, test1)  # rpart classification trees return probabilities by default
answers <- as.vector(ptest)
# Write each prediction to its own text file (problem_id_<i>.txt)
pml_write_files <- function(x) {
    n <- length(x)
    for (i in 1:n) {
        filename <- paste0("problem_id_", i, ".txt")
        write.table(x[i], file = filename, quote = FALSE, row.names = FALSE,
            col.names = FALSE)
    }
}
pml_write_files(answers)
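
Note that ptest is a probability matrix (two columns, one per class), so as.vector() above flattens it and the files contain probabilities, not labels. For hard class predictions, request them explicitly:

ptest_class <- predict(mtree, test1, type = "class")   # factor of "down"/"up" labels
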
# Predicted probabilities on the new data
ptest
##           down        up
## 1   0.06152869 0.9384713
## 2   0.60895045 0.3910496
## 3   0.06152869 0.9384713
## 4   0.07471066 0.9252893
## 5   0.06152869 0.9384713
## 6   0.41645011 0.5835499
## 7   0.06152869 0.9384713
## 8   0.07471066 0.9252893
## 9   0.06152869 0.9384713
## 10  0.07471066 0.9252893
## 11  0.06152869 0.9384713
## 12  0.60895045 0.3910496
## 13  0.06152869 0.9384713
## 14  0.60895045 0.3910496
## 15  0.06152869 0.9384713
## 16  0.60895045 0.3910496
## 17  0.06152869 0.9384713
## 18  0.60895045 0.3910496
## 19  0.06152869 0.9384713
## 20  0.60895045 0.3910496
## 21  0.06152869 0.9384713
## 22  0.60895045 0.3910496
## 23  0.06152869 0.9384713
## 24  0.60895045 0.3910496
## 25  0.06152869 0.9384713
## 26  0.60895045 0.3910496
## 27  0.06152869 0.9384713
## 28  0.60895045 0.3910496
## 29  0.06152869 0.9384713
## 30  0.60895045 0.3910496
## 31  0.06152869 0.9384713
## 32  0.60895045 0.3910496
## 33  0.06152869 0.9384713
## 34  0.60895045 0.3910496
## 35  0.06152869 0.9384713
## 36  0.60895045 0.3910496
## 37  0.06152869 0.9384713
## 38  0.60895045 0.3910496
## 39  0.06152869 0.9384713
## 40  0.60895045 0.3910496
## 41  0.06152869 0.9384713
## 42  0.60895045 0.3910496
## 43  0.06152869 0.9384713
## 44  0.60895045 0.3910496
## 45  0.06152869 0.9384713
## 46  0.41645011 0.5835499
## 47  0.06152869 0.9384713
## 48  0.06152869 0.9384713
## 49  0.06152869 0.9384713
## 50  0.60895045 0.3910496
## 51  0.06152869 0.9384713
## 52  0.60895045 0.3910496
## 53  0.06152869 0.9384713
## 54  0.60895045 0.3910496
## 55  0.06152869 0.9384713
## 56  0.60895045 0.3910496
## 57  0.06152869 0.9384713
## 58  0.60895045 0.3910496
## 59  0.06152869 0.9384713
## 60  0.60895045 0.3910496
## 61  0.06152869 0.9384713
## 62  0.60895045 0.3910496
## 63  0.06152869 0.9384713
## 64  0.60895045 0.3910496
## 65  0.06152869 0.9384713
## 66  0.07471066 0.9252893
## 67  0.06152869 0.9384713
## 68  0.60895045 0.3910496
## 69  0.06152869 0.9384713
## 70  0.60895045 0.3910496
## 71  0.06152869 0.9384713
## 72  0.60895045 0.3910496
## 73  0.06152869 0.9384713
## 74  0.60895045 0.3910496
## 75  0.06152869 0.9384713
## 76  0.07471066 0.9252893
## 77  0.06152869 0.9384713
## 78  0.60895045 0.3910496
## 79  0.06152869 0.9384713
## 80  0.60895045 0.3910496
## 81  0.06152869 0.9384713
## 82  0.07471066 0.9252893
## 83  0.06152869 0.9384713
## 84  0.60895045 0.3910496
## 85  0.06152869 0.9384713
## 86  0.60895045 0.3910496
## 87  0.06152869 0.9384713
## 88  0.60895045 0.3910496
## 89  0.06152869 0.9384713
## 90  0.60895045 0.3910496
## 91  0.06152869 0.9384713
## 92  0.60895045 0.3910496
## 93  0.06152869 0.9384713
## 94  0.41645011 0.5835499
## 95  0.06152869 0.9384713
## 96  0.41645011 0.5835499
## 97  0.06152869 0.9384713
## 98  0.60895045 0.3910496
## 99  0.06152869 0.9384713
## 100 0.60895045 0.3910496
## 101 0.06152869 0.9384713
## 102 0.60895045 0.3910496
## 103 0.06152869 0.9384713
## 104 0.41645011 0.5835499
## 105 0.06152869 0.9384713
## 106 0.60895045 0.3910496
## 107 0.60895045 0.3910496
## 108 0.06152869 0.9384713
## 109 0.60895045 0.3910496
## 110 0.06152869 0.9384713
## 111 0.60895045 0.3910496
## 112 0.06152869 0.9384713
## 113 0.60895045 0.3910496
## 114 0.06152869 0.9384713
## 115 0.60895045 0.3910496
## 116 0.06152869 0.9384713
## 117 0.60895045 0.3910496
## 118 0.06152869 0.9384713
## 119 0.07471066 0.9252893
## 120 0.06152869 0.9384713