I ran a neural network model on the churn data set and tested the model's accuracy.

I used the neuralnet package instead of the nnet function so that I could choose both the number of hidden layers and the number of nodes in each layer of my model.

# Read the raw churn data; blank fields and "NA" are treated as missing.
churn <- read.table("~/1 UW Tacoma/560 data mining/rcode/final/churn.txt", header = T, sep=',', na.strings=c('','NA'), stringsAsFactors=FALSE)
names(churn) <- c("State","Account.Length","Area.Code","Phone","Intl.Plan.","VMail.Plan","VMail.Message","Day.Mins" ,"Day.Calls" ,"Day.Charge","Eve.Mins" ,"Eve.Calls","Eve.Charge","Night.Mins","Night.Calls" ,"Night.Charge","Intl.Mins" ,"Intl.Calls","Intl.Charge","CustServ.Calls" ,"Churn.")
# Recode the yes/no and True./False. flags as 0/1 so the network can use them.
churn$Intl.Plan. <- ifelse(churn$Intl.Plan.=="yes",1,0)
churn$VMail.Plan <- ifelse(churn$VMail.Plan=="yes",1,0)
churn$Churn. <- ifelse(churn$Churn.=="False.",0,1)
# Drop State, Area.Code, Phone, Day.Calls, and Night.Calls (columns 1, 3, 4, 9, 15).
nn_data <- churn[ ,-c(1,3,4,9,15)]
names(nn_data)
##  [1] "Account.Length" "Intl.Plan."     "VMail.Plan"     "VMail.Message" 
##  [5] "Day.Mins"       "Day.Charge"     "Eve.Mins"       "Eve.Calls"     
##  [9] "Eve.Charge"     "Night.Mins"     "Night.Charge"   "Intl.Mins"     
## [13] "Intl.Calls"     "Intl.Charge"    "CustServ.Calls" "Churn."
any(is.na(churn))
## [1] FALSE
# Min-max normalization: rescales a numeric vector onto [0, 1].
normalize <- function(x) {
  (x - min(x)) / (max(x) - min(x))
}
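
As a quick sanity check (my own example, not part of the original script), the function maps any numeric vector onto [0, 1]:

normalize(c(2, 4, 6)) # returns 0.0 0.5 1.0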

library(magrittr)
library(dplyr)
## 
## Attaching package: 'dplyr'
## The following objects are masked from 'package:stats':
## 
##     filter, lag
## The following objects are masked from 'package:base':
## 
##     intersect, setdiff, setequal, union
nn_data <- nn_data %>% mutate_all(normalize)
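
The same normalization could equivalently be written with the current dplyr idiom, since mutate_all() has been superseded by across() in dplyr 1.0+:

nn_data <- nn_data %>% mutate(across(everything(), normalize)) # same effect as mutate_all(normalize)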
str(churn)
## 'data.frame':    113 obs. of  21 variables:
##  $ State         : chr  "AL" "MA" "MO" "LA" ...
##  $ Account.Length: int  118 121 147 117 141 65 74 168 95 62 ...
##  $ Area.Code     : int  510 510 415 408 415 415 415 408 510 415 ...
##  $ Phone         : chr  "391-8027" "355-9993" "329-9001" "335-4719" ...
##  $ Intl.Plan.    : num  1 0 1 0 1 0 0 0 0 0 ...
##  $ VMail.Plan    : num  0 1 0 0 1 0 0 0 0 0 ...
##  $ VMail.Message : int  0 24 0 0 37 0 0 0 0 0 ...
##  $ Day.Mins      : num  223 218 157 184 259 ...
##  $ Day.Calls     : int  98 88 79 97 84 137 127 96 88 70 ...
##  $ Day.Charge    : num  38 37.1 26.7 31.4 44 ...
##  $ Eve.Mins      : num  221 348 103 352 222 ...
##  $ Eve.Calls     : int  101 108 94 80 111 83 148 71 75 76 ...
##  $ Eve.Charge    : num  18.75 29.62 8.76 29.89 18.87 ...
##  $ Night.Mins    : num  204 213 212 216 326 ...
##  $ Night.Calls   : int  118 118 96 90 97 111 94 128 115 99 ...
##  $ Night.Charge  : num  9.18 9.57 9.53 9.71 14.69 ...
##  $ Intl.Mins     : num  6.3 7.5 7.1 8.7 11.2 12.7 9.1 11.2 12.3 13.1 ...
##  $ Intl.Calls    : int  6 7 6 4 5 6 5 2 5 6 ...
##  $ Intl.Charge   : num  1.7 2.03 1.92 2.35 3.02 3.43 2.46 3.02 3.32 3.54 ...
##  $ CustServ.Calls: int  0 3 0 1 0 4 0 1 3 4 ...
##  $ Churn.        : num  0 0 0 0 0 1 0 0 0 0 ...
library(neuralnet)
## 
## Attaching package: 'neuralnet'
## The following object is masked from 'package:dplyr':
## 
##     compute
set.seed(222)

# Randomly pick 100 of the 113 rows for training; the remaining 13 form the test set.
ind <- sample(1:nrow(nn_data), 100)

trainDF <- nn_data[ind,]
testDF <- nn_data[-ind,]
# neuralnet needs every predictor spelled out, so build the formula string by hand.
allVars <- colnames(nn_data)
predictorVars <- allVars[!allVars %in% "Churn."]
predictorVars <- paste(predictorVars, collapse = "+")
form <- as.formula(paste("Churn. ~", predictorVars))
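
The formula is assembled manually because neuralnet, at least in older versions, does not expand the Churn. ~ . shorthand. Printing form shows what gets fitted:

form # Churn. ~ Account.Length + Intl.Plan. + VMail.Plan + ... + CustServ.Calls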

The gradient descent method computes the derivative of the error with respect to each weight (the gradient). The weights are corrected based on this derivative, but the corrections sometimes overshoot. To prevent that, the derivative is multiplied by a learning rate before it is applied.
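
As a minimal illustration (my own sketch, not part of the original analysis), here is one gradient-descent update for a single weight w in a squared-error model y ≈ w*x:

# One gradient-descent step for a single weight (illustrative sketch).
x <- c(1, 2, 3)
y <- c(2, 4, 6)          # true relationship is y = 2*x
w <- 0.5                 # current weight estimate
lr <- 0.05               # learning rate

# Derivative of the mean squared error E = mean((y - w*x)^2) with respect to w.
grad <- mean(-2 * x * (y - w * x))

# Multiplying the raw derivative by the learning rate keeps the step from overshooting.
w <- w - lr * grad       # moves w from 0.5 to 1.2, toward the true value 2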

# hidden = c(10, 3): two hidden layers, with 10 and 3 nodes respectively.
neuralModel1 <- neuralnet(formula = form, hidden = c(10, 3), data = trainDF)
plot(neuralModel1, rep = "best")

neuralModel2 <- neuralnet(formula = form, hidden = c(10,5), data = trainDF)

plot(neuralModel2, rep = "best")

# Note: learningrate only affects traditional backpropagation (algorithm = "backprop");
# with neuralnet's default "rprop+" algorithm this argument is ignored.
neuralModel3 <- neuralnet(formula = form, hidden = c(6,8), data = trainDF, learningrate = .5)

plot(neuralModel3, rep = "best")

# stepmax raised to 1e6 so the larger networks have enough steps to converge.
neuralModel4 <- neuralnet(formula = form, hidden = c(6,9), data = trainDF, stepmax=1e6)

plot(neuralModel4, rep = "best")

neuralModel5 <- neuralnet(formula = form, hidden = c(8,7), data = trainDF, stepmax=1e6)

plot(neuralModel5, rep = "best")

neuralModel6 <- neuralnet(formula = form, hidden = c(7,5), data = trainDF, stepmax=1e6)

plot(neuralModel6, rep = "best")

# Same architecture as neuralModel6; without resetting the seed, the random
# initial weights differ, so the fitted network (and its error) differ too.
neuralModel7 <- neuralnet(formula = form, hidden = c(7,5), data = trainDF, stepmax=1e6)

plot(neuralModel7, rep = "best")

neuralModel8 <- neuralnet(formula = form, hidden = c(5,6), data = trainDF, stepmax=1e6)

plot(neuralModel8, rep = "best")
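
Rather than copy-pasting eight nearly identical fits, the architecture sweep could be written as a loop. A sketch under the same data (my restructuring, not the original code; results will differ from the runs above because neuralnet draws random starting weights unless a seed is set before each fit):

# Sketch: fit each candidate architecture and collect its test-set MSE.
architectures <- list(c(10, 3), c(10, 5), c(6, 8), c(6, 9),
                      c(8, 7), c(7, 5), c(7, 5), c(5, 6))
covariates <- testDF[, names(testDF) != "Churn."]  # predictors only
mse <- sapply(architectures, function(h) {
  fit <- neuralnet(formula = form, hidden = h, data = trainDF, stepmax = 1e6)
  pred <- compute(fit, covariates)$net.result
  mean((testDF$Churn. - pred)^2)
})
mse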

# Exclude the response from the covariates. Churn. is column 16, not 13;
# testDF[, -13] would drop Intl.Calls and leak Churn. into the inputs.
pr.nn1 <- compute(neuralModel1, testDF[, names(testDF) != "Churn."])
# Rescale back to the original range (a no-op here, since Churn. is already 0/1).
pr.nn_1 <- pr.nn1$net.result*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
test.r1 <- (testDF$Churn.)*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
MSE.nn1 <- sum((test.r1 - pr.nn_1)^2)/nrow(testDF)
MSE.nn1 # mean squared error
## [1] 0.07720877319
pr.nn2 <- compute(neuralModel2, testDF[, names(testDF) != "Churn."])
pr.nn_2 <- pr.nn2$net.result*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
test.r2 <- (testDF$Churn.)*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
MSE.nn2 <- sum((test.r2 - pr.nn_2)^2)/nrow(testDF)
MSE.nn2
## [1] 3.234171743
pr.nn3 <- compute(neuralModel3, testDF[, names(testDF) != "Churn."])
pr.nn_3 <- pr.nn3$net.result*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
test.r3 <- (testDF$Churn.)*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
MSE.nn3 <- sum((test.r3 - pr.nn_3)^2)/nrow(testDF)
MSE.nn3
## [1] 0.3300210923
pr.nn4 <- compute(neuralModel4, testDF[, names(testDF) != "Churn."])
pr.nn_4 <- pr.nn4$net.result*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
test.r4 <- (testDF$Churn.)*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
MSE.nn4 <- sum((test.r4 - pr.nn_4)^2)/nrow(testDF)
MSE.nn4
## [1] 0.1482866564
pr.nn5 <- compute(neuralModel5, testDF[, names(testDF) != "Churn."])
pr.nn_5 <- pr.nn5$net.result*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
test.r5 <- (testDF$Churn.)*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
MSE.nn5 <- sum((test.r5 - pr.nn_5)^2)/nrow(testDF)
MSE.nn5
## [1] 2.910228995
pr.nn6 <- compute(neuralModel6, testDF[, names(testDF) != "Churn."])
pr.nn_6 <- pr.nn6$net.result*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
test.r6 <- (testDF$Churn.)*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
MSE.nn6 <- sum((test.r6 - pr.nn_6)^2)/nrow(testDF)
MSE.nn6
## [1] 0.1024967995
pr.nn7 <- compute(neuralModel7, testDF[, names(testDF) != "Churn."])
pr.nn_7 <- pr.nn7$net.result*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
test.r7 <- (testDF$Churn.)*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
MSE.nn7 <- sum((test.r7 - pr.nn_7)^2)/nrow(testDF)
MSE.nn7
## [1] 0.914795132
pr.nn8 <- compute(neuralModel8, testDF[, names(testDF) != "Churn."])
pr.nn_8 <- pr.nn8$net.result*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
test.r8 <- (testDF$Churn.)*(max(nn_data$Churn.)-min(nn_data$Churn.))+min(nn_data$Churn.)
MSE.nn8 <- sum((test.r8 - pr.nn_8)^2)/nrow(testDF)
MSE.nn8 # lowest test MSE of the eight architectures
## [1] 0.0460874454
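
Since Churn. is binary, the network's continuous output can also be scored as a classifier. A minimal sketch (not in the original analysis) using model 8, which had the lowest test MSE:

# Threshold the predicted values at 0.5 to get 0/1 class labels.
pred_class <- ifelse(pr.nn_8 > 0.5, 1, 0)
table(actual = test.r8, predicted = pred_class) # confusion matrix
mean(pred_class == test.r8)                     # classification accuracy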
# Layer-by-layer activations for the 13 test rows: input layer, then the two hidden layers.
pr.nn7$neurons
## [[1]]
##     1 Account.Length Intl.Plan. VMail.Plan VMail.Message       Day.Mins
## 14  1   0.4024390244          0          1  0.7173913043 0.477372262774
## 26  1   0.8048780488          0          0  0.0000000000 0.094525547445
## 27  1   0.3963414634          0          0  0.0000000000 0.610218978102
## 35  1   0.8475609756          0          0  0.0000000000 0.297445255474
## 49  1   0.5243902439          0          0  0.0000000000 0.369708029197
## 72  1   0.4390243902          0          0  0.0000000000 0.881021897810
## 73  1   0.8170731707          0          0  0.0000000000 0.009854014599
## 81  1   0.7195121951          0          1  0.6304347826 0.439416058394
## 86  1   0.4390243902          0          0  0.0000000000 0.310218978102
## 88  1   0.4268292683          0          0  0.0000000000 0.238686131387
## 93  1   0.5426829268          0          0  0.0000000000 0.508394160584
## 101 1   0.5182926829          0          0  0.0000000000 0.654014598540
## 105 1   0.8963414634          0          0  0.0000000000 0.623722627737
##        Day.Charge     Eve.Mins    Eve.Calls   Eve.Charge   Night.Mins
## 14  0.47745813654 0.5667915106 0.1465517241 0.5668135095 0.4023800669
## 26  0.09467582653 0.3292759051 0.1293103448 0.3292951542 0.7177389364
## 27  0.61034778875 0.5315230961 0.5086206897 0.5315712188 0.7017478617
## 35  0.29755259768 0.7509363296 0.3793103448 0.7507342144 0.4864261807
## 49  0.36968656076 0.7375156055 0.1637931034 0.7375183554 0.6359241354
## 72  0.88106483469 0.4675405743 0.4482758621 0.4676945668 0.7906284864
## 73  0.00987548304 0.1432584270 0.3189655172 0.1431718062 0.4295277055
## 81  0.43945899528 0.6076779026 0.3275862069 0.6075624082 0.9873558944
## 86  0.31021897810 0.5109238452 0.4913793103 0.5110132159 0.1937523243
## 88  0.23872906827 0.4425717853 0.5086206897 0.4427312775 0.7277798438
## 93  0.50837269214 0.5496254682 0.2413793103 0.5495594714 0.4960952027
## 101 0.65414340919 0.4868913858 0.4310344828 0.4867841410 0.4927482335
## 105 0.62387290683 0.3642322097 0.6293103448 0.3641703377 0.6838973596
##     Night.Charge    Intl.Mins  Intl.Charge CustServ.Calls Churn.
## 14  0.4024793388 0.3939393939 0.3932584270            0.2      0
## 26  0.7173553719 0.6484848485 0.6471910112            0.4      0
## 27  0.7016528926 0.4121212121 0.4112359551            0.2      0
## 35  0.4859504132 0.4606060606 0.4606741573            0.2      0
## 49  0.6355371901 0.3515151515 0.3505617978            0.4      0
## 72  0.7900826446 0.4969696970 0.4966292135            0.0      1
## 73  0.4297520661 0.2848484848 0.2831460674            0.8      1
## 81  0.9876033058 0.3090909091 0.3078651685            0.0      0
## 86  0.1933884298 0.4606060606 0.4606741573            0.2      0
## 88  0.7272727273 0.2666666667 0.2651685393            0.2      0
## 93  0.4958677686 0.3151515152 0.3146067416            0.8      0
## 101 0.4925619835 0.5030303030 0.5033707865            0.8      0
## 105 0.6834710744 0.3696969697 0.3685393258            0.6      0
## 
## [[2]]
##     [,1]            [,2]             [,3]         [,4]           [,5]
## 14     1 0.9130498038082 0.04531628989380 1.0000000000 0.001242502499
## 26     1 0.2301254642600 0.43973973136082 0.9810906492 0.937450985649
## 27     1 0.0540074278891 0.17237415658602 0.9293383415 0.840606858200
## 35     1 0.8848613266438 0.00005175894536 0.9556232643 0.285761528171
## 49     1 0.7784802205391 0.00310217394588 0.9247454237 0.943917760940
## 72     1 0.0002854186691 1.00000000000000 0.6607178354 0.999645008151
## 73     1 0.0144435571579 1.00000000000000 0.6690978084 0.999346974410
## 81     1 0.9669711762407 0.90492805877479 1.0000000000 0.989330329305
## 86     1 0.4509250111280 0.00042320894768 0.9340530053 0.001079204984
## 88     1 0.5052260549854 0.61275937606779 0.9183049696 0.986029854433
## 93     1 0.0875180258821 0.13676651298419 0.9054171390 0.782822470502
## 101    1 0.0058988920263 0.19419018306359 0.9461698284 0.084697577690
## 105    1 0.0055212539249 0.89174001532146 0.9411940675 0.975508415276
##                    [,6]           [,7]           [,8]
## 14  0.00000000104363546 1.000000000000 0.915329671296
## 26  0.99021993235589822 0.906222600349 0.002994839799
## 27  0.99128791922710047 0.219877672753 0.506272161351
## 35  0.96287070645521633 0.001544642149 0.205936416482
## 49  0.98790205614590632 0.039690393163 0.165578914206
## 72  0.80402355053103636 0.283614793905 0.755113409631
## 73  0.42778400448814191 0.991612484589 0.107161092776
## 81  0.00000493848229876 1.000000000000 0.425111948049
## 86  0.85378313496689073 0.003958031917 0.761734508354
## 88  0.98778927990570897 0.772319764813 0.292340734073
## 93  0.98839984296266037 0.407688596202 0.248455167038
## 101 0.99062297995072968 0.227669530067 0.344593812054
## 105 0.99476761189556895 0.903383735139 0.168578836135
## 
## [[3]]
##     [,1]               [,2]            [,3]
## 14     1 0.0000005424631758 0.0001399107155
## 26     1 0.0034731269335437 0.0061964152231
## 27     1 0.8427800803973871 0.6362020132359
## 35     1 0.0007170705253198 0.0016290775090
## 49     1 0.0097815632350900 0.0187337969334
## 72     1 0.9459681571476115 0.5560178061730
## 73     1 0.0125845847581818 0.0367280821883
## 81     1 0.0000010275486536 0.0001894956288
## 86     1 0.0727653797736848 0.0522430074492
## 88     1 0.0017669451883630 0.0017439273737
## 93     1 0.3333274852448020 0.2465952727081
## 101    1 0.3545236496290259 0.1622051698790
## 105    1 0.0477813731257835 0.0203238994264
##                                             [,4]          [,5]
## 14  0.000000000000000000000000000000005247980245 0.06522872142
## 26  0.000000000000000000223429236001093231697365 0.18757094361
## 27  0.000041183219065076248069021125353117440682 0.35714987035
## 35  0.000000000000514742996460807785644858691754 0.32218096741
## 49  0.000000000188328682783473150700107034971609 0.35439910357
## 72  0.009624917788037225946373709462022816296667 0.11323135360
## 73  0.000000000000000198319502668119843954291914 0.08471806048
## 81  0.000000000000000000000000000134483302566181 0.01626779614
## 86  0.000000011532233858371514043268668547881362 0.32525663795
## 88  0.000000000000000000210398355809532475960838 0.11299924139
## 93  0.000000002494796332810599761957426334646470 0.38538949703
## 101 0.000000598847831457207274361009829988233832 0.37826852280
## 105 0.000000000000002454567607247672587347450679 0.10195511697
##             [,6]
## 14  0.8162950145
## 26  0.7880621612
## 27  0.8287968967
## 35  0.8830550780
## 49  0.8807222379
## 72  0.4435920646
## 73  0.3769908697
## 81  0.4628340208
## 86  0.8655456726
## 88  0.7455047146
## 93  0.8379033536
## 101 0.8151999382
## 105 0.6052673490