Method 1: Neuralnet

library(neuralnet)
## Loading required package: grid
## Loading required package: MASS
#?infert  #Data source 
dim(infert)
## [1] 248   8
#?neuralnet

#err.fct="ce" (cross-entropy) and linear.output=FALSE because the outcome is binary, not continuous
#hidden=2 gives one hidden layer with two neurons; the blue nodes in plot(nn) are the bias (intercept) terms
nn=neuralnet(case~age+parity+induced+spontaneous,data=infert,hidden=2,err.fct="ce",linear.output=FALSE)
nn
## Call: neuralnet(formula = case ~ age + parity + induced + spontaneous,     data = infert, hidden = 2, err.fct = "ce", linear.output = FALSE)
## 
## 1 repetition was calculated.
## 
##         Error Reached Threshold Steps
## 1 123.8188988    0.009886243403 14194
plot(nn)
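#The covariates sit on very different scales (age runs to 44, the rest are
#small counts), and networks usually train more reliably on scaled inputs.
#A minimal sketch of min-max scaling before fitting; this preprocessing step
#is an assumption, not part of the original analysis.
vars= c("age","parity","induced","spontaneous")
scaled= infert
scaled[vars]= lapply(infert[vars],function(x) (x-min(x))/(max(x)-min(x)))
nn.scaled=neuralnet(case~age+parity+induced+spontaneous,data=scaled,
                    hidden=2,err.fct="ce",linear.output=FALSE)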
head(nn$net.result[[1]]) #fitted probabilities for the training data (first rows shown)
##               [,1]
## 1 0.15393231441329
## 2 0.59566834982867
## 3 0.14516313577733
## 4 0.15226184852471
## 5 0.35708627802210
## 6 0.52428713997859
nn$weights #fitted weight matrices, one per layer
## [[1]]
## [[1]][[1]]
##                [,1]          [,2]
## [1,]  3.79222177306  5.4812931836
## [2,] -2.98696789505 -0.1135839746
## [3,]  2.22424173705  1.9375378074
## [4,] 27.89550162397 -2.4983429111
## [5,]  0.02273745222 -3.7423648008
## 
## [[1]][[2]]
##                 [,1]
## [1,]     2.900003138
## [2,] -1080.193861320
## [3,]    -4.675214309
nn$result.matrix #summary: error, steps and all fitted weights
##                                          1
## error                     123.818898815850
## reached.threshold           0.009886243403
## steps                   14194.000000000000
## Intercept.to.1layhid1       3.792221773056
## age.to.1layhid1            -2.986967895048
## parity.to.1layhid1          2.224241737047
## induced.to.1layhid1        27.895501623966
## spontaneous.to.1layhid1     0.022737452216
## Intercept.to.1layhid2       5.481293183636
## age.to.1layhid2            -0.113583974648
## parity.to.1layhid2          1.937537807408
## induced.to.1layhid2        -2.498342911082
## spontaneous.to.1layhid2    -3.742364800794
## Intercept.to.case           2.900003137905
## 1layhid.1.to.case       -1080.193861319759
## 1layhid.2.to.case          -4.675214308834
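#Any fitted value can be reproduced by hand from these weights: prepend a 1
#for the intercept, apply the logistic activation at the hidden layer, and
#apply it again at the output (linear.output=FALSE). A sketch for row 1:
sigmoid= function(z) 1/(1+exp(-z))
w= nn$weights[[1]]
x= c(1, nn$covariate[1,])          #intercept plus the four covariates
hidden= sigmoid(x %*% w[[1]])      #the two hidden-layer activations
sigmoid(c(1, hidden) %*% w[[2]])   #should match nn$net.result[[1]][1]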
head(nn$covariate)  #the covariate matrix (age, parity, induced, spontaneous)
##      [,1] [,2] [,3] [,4]
## [1,]   26    6    1    2
## [2,]   42    1    1    0
## [3,]   39    6    2    0
## [4,]   34    4    2    0
## [5,]   35    3    1    1
## [6,]   36    4    2    1
infert$case  #Real outcome
##   [1] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
##  [36] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
##  [71] 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## [106] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## [141] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## [176] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## [211] 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
## [246] 0 0 0
#Threshold the fitted probabilities at 0.5 to get predicted classes,
#then check the misclassification rate against the real outcomes
nn1= ifelse(nn$net.result[[1]]>0.5,1,0)
head(nn1)
##   [,1]
## 1    0
## 2    1
## 3    0
## 4    0
## 5    0
## 6    1
misClassificationError= mean(infert$case!=nn1)
misClassificationError   #22.98% of the training cases are misclassified
## [1] 0.2298387097
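#This 22.98% is a resubstitution error, measured on the same data the network
#was trained on, so it is optimistic. A sketch of a held-out evaluation; the
#70/30 split and the seed are illustrative assumptions.
set.seed(1)
idx= sample(nrow(infert), round(0.7*nrow(infert)))
nn.tr= neuralnet(case~age+parity+induced+spontaneous, data=infert[idx,],
                 hidden=2, err.fct="ce", linear.output=FALSE)
pred= compute(nn.tr, covariate=infert[-idx,c("age","parity","induced","spontaneous")])
mean(infert$case[-idx] != ifelse(pred$net.result>0.5,1,0)) #held-out error rate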
OutPutVsPred=cbind(infert$case,nn1)
head(OutPutVsPred) #actual outcome vs. predicted class
##   [,1] [,2]
## 1    1    0
## 2    1    1
## 3    1    0
## 4    1    0
## 5    1    0
## 6    1    1
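#A cross-tabulation summarizes the actual-vs-predicted comparison at a glance;
#a sketch with base R's table() (counts vary between runs because the
#starting weights are random).
conf= table(actual=infert$case, predicted=as.vector(nn1))
conf
conf["1","1"]/sum(conf["1",]) #sensitivity (true-positive rate)
conf["0","0"]/sum(conf["0",]) #specificity (true-negative rate)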
###Using the backpropagation algorithm and playing with the learning rate and the entropy error
nn.bp=neuralnet(
  formula= case~age+parity+induced+spontaneous,
  data=infert,hidden=2,learningrate=0.01,
  algorithm="backprop",err.fct="ce",
  linear.output=FALSE)

nn.bp
## Call: neuralnet(formula = case ~ age + parity + induced + spontaneous,     data = infert, hidden = 2, learningrate = 0.01, algorithm = "backprop",     err.fct = "ce", linear.output = FALSE)
## 
## 1 repetition was calculated.
## 
##         Error Reached Threshold Steps
## 1 158.0855558    0.008284001727    20
nn ##nn (default resilient backpropagation) did a better job: lower error (123.8 vs. 158.1)
## Call: neuralnet(formula = case ~ age + parity + induced + spontaneous,     data = infert, hidden = 2, err.fct = "ce", linear.output = FALSE)
## 
## 1 repetition was calculated.
## 
##         Error Reached Threshold Steps
## 1 123.8188988    0.009886243403 14194
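#Both fits start from random weights, so a single run can land in a poor
#local minimum. A sketch using neuralnet's rep argument to fit several
#repetitions and keep the best; rep=5 is an arbitrary choice.
nn.rep= neuralnet(case~age+parity+induced+spontaneous, data=infert,
                  hidden=2, err.fct="ce", linear.output=FALSE, rep=5)
nn.rep$result.matrix["error",]            #one error per repetition
which.min(nn.rep$result.matrix["error",]) #the repetition to keep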
##Prediction
#Predict for four hypothetical 22-year-old women with parity 1 and all
#combinations of induced/spontaneous abortion (0/1 each)
new.output=compute(nn,covariate=matrix(c(22,1,0,0,
                                         22,1,1,0,
                                         22,1,0,1,
                                         22,1,1,1),byrow=TRUE,ncol=4))
new.output$net.result #The 4th profile (one induced and one spontaneous abortion) has an 87.16% predicted probability of being a case
##              [,1]
## [1,] 0.1491437113
## [2,] 0.1987688654
## [3,] 0.3375371732
## [4,] 0.8715737411
#Classify as a case (1) when the predicted probability exceeds 0.5
nn2= ifelse(new.output$net.result>0.5,1,0)
nn2 #Only the 4th profile is classified as a case
##      [,1]
## [1,]    0
## [2,]    0
## [3,]    0
## [4,]    1
###Confidence intervals
ci=confidence.interval(nn,alpha=0.05) #not available for models fitted with algorithm="backprop"
ci
## $lower.ci
## $lower.ci[[1]]
## $lower.ci[[1]][[1]]
##              [,1]           [,2]
## [1,] -2.782284630  -0.8527047096
## [2,] -4.444022524  -0.4072937480
## [3,]  1.238925075  -7.0327039521
## [4,] 14.746489356 -12.4066059982
## [5,] -1.154477148 -20.5144604437
## 
## $lower.ci[[1]][[2]]
##                [,1]
## [1,]   -17.89673156
## [2,] -1081.86542931
## [3,]   -26.03144144
## 
## 
## 
## $upper.ci
## $upper.ci[[1]]
## $upper.ci[[1]][[1]]
##              [,1]          [,2]
## [1,] 10.366728176 11.8152910769
## [2,] -1.529913266  0.1801257987
## [3,]  3.209558399 10.9077795669
## [4,] 41.044513892  7.4099201761
## [5,]  1.199952053 13.0297308421
## 
## $upper.ci[[1]][[2]]
##                [,1]
## [1,]    23.69673784
## [2,] -1078.52229333
## [3,]    16.68101282
## 
## 
## 
## $nic
## [1] 134.833922
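#One quick use of these intervals: flag input-to-hidden weights whose 95%
#interval excludes zero. A sketch against the list structure printed above.
(ci$lower.ci[[1]][[1]] > 0) | (ci$upper.ci[[1]][[1]] < 0)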
###Visualize the results
##One generalized-weights plot (gwplot) per covariate
par(mfrow=c(2,2))
gwplot(nn,selected.covariate="age",min=-2.5,max=5)
gwplot(nn,selected.covariate="parity",min=-2.5,max=5)
gwplot(nn,selected.covariate="induced",min=-2.5,max=5)
gwplot(nn,selected.covariate="spontaneous",min=-2.5,max=5)

Method 2: Multinomial Log-linear Models

library(nnet)
data(mtcars)
library(caret)
## Loading required package: lattice
## Loading required package: ggplot2
trainindex<- createDataPartition(y=mtcars$cyl,p=0.7,list=FALSE)
training<- mtcars[trainindex,]
testing<- mtcars[-trainindex,]
cylModel<- multinom(cyl~.,data=training,maxit=500)
## # weights:  36 (22 variable)
## initial  value 25.268083 
## iter  10 value 2.466670
## iter  20 value 0.036268
## iter  30 value 0.000230
## final  value 0.000050 
## converged
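#The same model can also be fitted through caret's train() wrapper, which
#adds resampling on top of nnet::multinom; the 5-fold cross-validation below
#is an assumed setup, not part of the original analysis.
cvModel<- train(factor(cyl)~., data=training, method="multinom",
                trControl=trainControl(method="cv",number=5), trace=FALSE)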
##Variable importance
topModels<- varImp(cylModel)
topModels$Variables<- row.names(topModels)
#Sort in decreasing order and check the 5 most important variables
topModels<- topModels[order(-topModels$Overall),]
head(topModels,5)
##          Overall Variables
## vs   51.06146293        vs
## drat 49.51192817      drat
## gear 34.53692949      gear
## carb 29.44117979      carb
## am   23.46641667        am
##Prediction
#Both prediction types come from the same fitted model; they differ only in how the result is presented
#1. type="probs": class probabilities
pred1<- predict(cylModel,type="probs",newdata=testing)
head(pred1,5)
##                                                                                  4
## Mazda RX4      0.99999503622827012883078623417532071471214294433593750000000000000
## Mazda RX4 Wag  0.99994340432097927884314003676990978419780731201171875000000000000
## Hornet 4 Drive 0.00000000000120664757750777263827110897783340224143772134723917588
## Duster 360     0.00000000000000000000000000000000000000000000000000000006669009651
## Merc 450SLC    0.00000000000000000000000000000000000224537238180362712685656155907
##                                                   6
## Mazda RX4      0.0000044022970556656173209456560380
## Mazda RX4 Wag  0.0000294426508037891794544466811390
## Hornet 4 Drive 0.9999999999987934096168373798718676
## Duster 360     0.0000000000000000000000001966360747
## Merc 450SLC    0.0000000000000000000094164298609383
##                                           8
## Mazda RX4      0.00000056147467421563783176
## Mazda RX4 Wag  0.00002715302821698180665974
## Hornet 4 Drive 0.00000000000000001650297471
## Duster 360     1.00000000000000000000000000
## Merc 450SLC    1.00000000000000000000000000
#2. type="class": predicted class labels (recommended)
pred2<- predict(cylModel,type="class",newdata=testing)
head(pred2,5)
## [1] 4 4 6 8 8
## Levels: 4 6 8
##Evaluation (caret)
postResample(testing$cyl,pred2) #accuracy 77.78%
##     Accuracy        Kappa 
## 0.7777777778 0.6666666667
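#postResample reports only overall accuracy and kappa; caret's
#confusionMatrix() breaks the agreement down per class. A sketch, assuming
#testing$cyl is converted to a factor with the same levels as pred2.
confusionMatrix(pred2, factor(testing$cyl, levels=levels(pred2)))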