# Working on Logistic Regression
library(caTools)
library(caret)
## Loading required package: lattice
## Loading required package: ggplot2
library(dplyr)
##
## Attaching package: 'dplyr'
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
library(ggplot2)
d <- read.csv("diabetes.csv", na.strings = c("", "NA"))
View(d)
str(d)
## 'data.frame': 768 obs. of 9 variables:
## $ Pregnancies : int 6 1 8 1 0 5 3 10 2 8 ...
## $ Glucose : int 148 85 183 89 137 116 78 115 197 125 ...
## $ BloodPressure : int 72 66 64 66 40 74 50 0 70 96 ...
## $ SkinThickness : int 35 29 0 23 35 0 32 0 45 0 ...
## $ Insulin : int 0 0 0 94 168 0 88 0 543 0 ...
## $ BMI : num 33.6 26.6 23.3 28.1 43.1 25.6 31 35.3 30.5 0 ...
## $ DiabetesPedigreeFunction: num 0.627 0.351 0.672 0.167 2.288 ...
## $ Age : int 50 31 32 21 33 30 26 29 53 54 ...
## $ Outcome : int 1 0 1 0 1 0 1 0 1 1 ...
dim(d)
## [1] 768 9
head(d)
## Pregnancies Glucose BloodPressure SkinThickness Insulin BMI
## 1 6 148 72 35 0 33.6
## 2 1 85 66 29 0 26.6
## 3 8 183 64 0 0 23.3
## 4 1 89 66 23 94 28.1
## 5 0 137 40 35 168 43.1
## 6 5 116 74 0 0 25.6
## DiabetesPedigreeFunction Age Outcome
## 1 0.627 50 1
## 2 0.351 31 0
## 3 0.672 32 1
## 4 0.167 21 0
## 5 2.288 33 1
## 6 0.201 30 0
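# Note: read.csv's na.strings above only catches blank/"NA" cells;
# several columns visibly use 0 as a placeholder (e.g. BloodPressure,
# SkinThickness, Insulin and BMI in the rows above). A quick check, as
# a sketch (output not shown):
colSums(is.na(d))
sapply(d[c("Glucose", "BloodPressure", "SkinThickness", "Insulin", "BMI")],
       function(x) sum(x == 0))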
# Splitting the dataset into training and testing parts
# Note: sample.split() expects the outcome vector (see the sketch after
# the subsets below); passing the whole data frame, as here, returns one
# logical per column, which subset() then recycles across the rows.
split <- sample.split(d, SplitRatio = 0.7)
split
## [1] TRUE TRUE TRUE TRUE FALSE FALSE TRUE FALSE TRUE
training <- subset(d, split == TRUE)
testing <- subset(d, split == FALSE)
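# For a reproducible, class-stratified split, sample.split() is given
# the outcome vector rather than the whole data frame. A minimal sketch,
# left commented out so the split actually used above is not overwritten:
# set.seed(123)                                    # any fixed seed
# split <- sample.split(d$Outcome, SplitRatio = 0.7)
# training <- subset(d, split == TRUE)
# testing <- subset(d, split == FALSE)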
# Fit a logistic regression on all predictors
model <- glm(Outcome ~ ., data = training, family = "binomial")
summary(model)
##
## Call:
## glm(formula = Outcome ~ ., family = "binomial", data = training)
##
## Deviance Residuals:
## Min 1Q Median 3Q Max
## -2.6287 -0.7699 -0.4004 0.7463 2.5478
##
## Coefficients:
## Estimate Std. Error z value Pr(>|z|)
## (Intercept) -8.619606 0.912215 -9.449 < 2e-16 ***
## Pregnancies 0.143067 0.040439 3.538 0.000403 ***
## Glucose 0.035084 0.004417 7.943 1.98e-15 ***
## BloodPressure -0.008761 0.006439 -1.361 0.173636
## SkinThickness -0.001337 0.008533 -0.157 0.875515
## Insulin -0.001076 0.001174 -0.916 0.359430
## BMI 0.096324 0.018443 5.223 1.76e-07 ***
## DiabetesPedigreeFunction 0.830824 0.360762 2.303 0.021280 *
## Age 0.009852 0.010930 0.901 0.367385
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## (Dispersion parameter for binomial family taken to be 1)
##
## Null deviance: 680.40 on 512 degrees of freedom
## Residual deviance: 496.01 on 504 degrees of freedom
## AIC: 514.01
##
## Number of Fisher Scoring iterations: 5
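# The estimates are on the log-odds scale; exponentiating gives odds
# ratios, e.g. exp(0.035) for Glucose means each additional unit of
# glucose multiplies the odds of diabetes by about 1.036. A sketch
# (output not shown):
exp(coef(model))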
# Now we drop the apparently non-significant predictors one at a time
# and refit, checking how the residual deviance and AIC respond.
# (The commented deviance/AIC values before each refit below were noted
# from an earlier interactive run; no seed was set, so that run used a
# slightly different split and its numbers differ from the output here.)
# Earlier run: null deviance 672.12 on 511 df; residual deviance 494.21 on 503 df; AIC 512.21
model <- glm(Outcome ~ . - DiabetesPedigreeFunction, data = training, family = "binomial")
summary(model)
##
## Call:
## glm(formula = Outcome ~ . - DiabetesPedigreeFunction, family = "binomial",
## data = training)
##
## Deviance Residuals:
## Min 1Q Median 3Q Max
## -2.2797 -0.7490 -0.4022 0.7572 2.4931
##
## Coefficients:
## Estimate Std. Error z value Pr(>|z|)
## (Intercept) -8.2399879 0.8812793 -9.350 < 2e-16 ***
## Pregnancies 0.1411555 0.0401704 3.514 0.000442 ***
## Glucose 0.0347560 0.0043506 7.989 1.36e-15 ***
## BloodPressure -0.0094391 0.0064582 -1.462 0.143861
## SkinThickness 0.0012288 0.0083953 0.146 0.883634
## Insulin -0.0009008 0.0011518 -0.782 0.434159
## BMI 0.0960373 0.0184411 5.208 1.91e-07 ***
## Age 0.0113004 0.0108825 1.038 0.299082
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## (Dispersion parameter for binomial family taken to be 1)
##
## Null deviance: 680.40 on 512 degrees of freedom
## Residual deviance: 501.42 on 505 degrees of freedom
## AIC: 517.42
##
## Number of Fisher Scoring iterations: 5
# Earlier run: null deviance 672.12 on 511 df; residual deviance 506.44 on 504 df; AIC 522.44
model <- glm(Outcome ~ . - BloodPressure, data = training, family = "binomial")
summary(model)
##
## Call:
## glm(formula = Outcome ~ . - BloodPressure, family = "binomial",
## data = training)
##
## Deviance Residuals:
## Min 1Q Median 3Q Max
## -2.7024 -0.7566 -0.4070 0.7664 2.4291
##
## Coefficients:
## Estimate Std. Error z value Pr(>|z|)
## (Intercept) -8.978161 0.881546 -10.185 < 2e-16 ***
## Pregnancies 0.137305 0.040054 3.428 0.000608 ***
## Glucose 0.034899 0.004403 7.926 2.26e-15 ***
## SkinThickness -0.003222 0.008380 -0.385 0.700592
## Insulin -0.001073 0.001169 -0.918 0.358512
## BMI 0.093010 0.018257 5.094 3.50e-07 ***
## DiabetesPedigreeFunction 0.852794 0.360744 2.364 0.018079 *
## Age 0.007833 0.010839 0.723 0.469858
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## (Dispersion parameter for binomial family taken to be 1)
##
## Null deviance: 680.40 on 512 degrees of freedom
## Residual deviance: 497.87 on 505 degrees of freedom
## AIC: 513.87
##
## Number of Fisher Scoring iterations: 5
# Earlier run: null deviance 672.12 on 511 df; residual deviance 497.55 on 504 df; AIC 513.55
model <- glm(Outcome ~ . - Insulin, data = training, family = "binomial")
summary(model)
##
## Call:
## glm(formula = Outcome ~ . - Insulin, family = "binomial", data = training)
##
## Deviance Residuals:
## Min 1Q Median 3Q Max
## -2.7247 -0.7715 -0.3944 0.7431 2.5579
##
## Coefficients:
## Estimate Std. Error z value Pr(>|z|)
## (Intercept) -8.515822 0.902355 -9.437 < 2e-16 ***
## Pregnancies 0.142352 0.040262 3.536 0.000407 ***
## Glucose 0.033698 0.004106 8.206 2.28e-16 ***
## BloodPressure -0.008725 0.006407 -1.362 0.173243
## SkinThickness -0.004915 0.007517 -0.654 0.513221
## BMI 0.097218 0.018361 5.295 1.19e-07 ***
## DiabetesPedigreeFunction 0.814126 0.360994 2.255 0.024119 *
## Age 0.010756 0.010857 0.991 0.321804
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## (Dispersion parameter for binomial family taken to be 1)
##
## Null deviance: 680.40 on 512 degrees of freedom
## Residual deviance: 496.85 on 505 degrees of freedom
## AIC: 512.85
##
## Number of Fisher Scoring iterations: 5
# Earlier run: null deviance 672.12 on 511 df; residual deviance 495.51 on 504 df; AIC 511.51
model <- glm(Outcome ~ . - SkinThickness, data = training, family = "binomial")
summary(model)
##
## Call:
## glm(formula = Outcome ~ . - SkinThickness, family = "binomial",
## data = training)
##
## Deviance Residuals:
## Min 1Q Median 3Q Max
## -2.6169 -0.7663 -0.3994 0.7474 2.5447
##
## Coefficients:
## Estimate Std. Error z value Pr(>|z|)
## (Intercept) -8.612647 0.910497 -9.459 < 2e-16 ***
## Pregnancies 0.143318 0.040441 3.544 0.000394 ***
## Glucose 0.035193 0.004367 8.059 7.69e-16 ***
## BloodPressure -0.008925 0.006353 -1.405 0.160073
## Insulin -0.001161 0.001041 -1.116 0.264485
## BMI 0.095439 0.017540 5.441 5.29e-08 ***
## DiabetesPedigreeFunction 0.822985 0.356997 2.305 0.021150 *
## Age 0.009912 0.010933 0.907 0.364635
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## (Dispersion parameter for binomial family taken to be 1)
##
## Null deviance: 680.40 on 512 degrees of freedom
## Residual deviance: 496.03 on 505 degrees of freedom
## AIC: 512.03
##
## Number of Fisher Scoring iterations: 5
# Earlier run: null deviance 672.12 on 511 df; residual deviance 494.23 on 504 df; AIC 510.23
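# Backward stepwise selection by AIC automates the manual drop-one
# comparisons above; a sketch using stats::step (output not shown):
full <- glm(Outcome ~ ., data = training, family = "binomial")
reduced <- step(full, direction = "backward", trace = FALSE)
formula(reduced)   # predictors retained by the AIC criterion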
# Predicted probabilities on the training set
res <- predict(model, training, type = "response")
res
## 1 2 3 4 7 9
## 0.737210544 0.050584600 0.817113125 0.047066043 0.071377782 0.714584413
## 10 11 12 13 16 18
## 0.039253763 0.279641522 0.922590420 0.791764262 0.374846423 0.238128935
## 19 20 21 22 25 27
## 0.354701759 0.249046514 0.447934761 0.368584169 0.767718090 0.783062866
## 28 29 30 31 34 36
## 0.048602186 0.593667718 0.333774791 0.437925821 0.047216850 0.146738755
## 37 38 39 40 43 45
## 0.731701818 0.433330620 0.191819834 0.555709755 0.134181798 0.661160219
## 46 47 48 49 52 54
## 0.950127729 0.439090394 0.043962131 0.437430596 0.082965920 0.852156432
## 55 56 57 58 61 63
## 0.731477062 0.023221673 0.902479985 0.391495431 0.007307736 0.024721448
## 64 65 66 67 70 72
## 0.311554278 0.394725650 0.139377850 0.201010938 0.355563606 0.398294670
## 73 74 75 76 79 81
## 0.872099293 0.307318447 0.059497419 0.002010977 0.645765430 0.106451596
## 82 83 84 85 88 90
## 0.004408746 0.156746166 0.053281660 0.763579444 0.214609650 0.082579218
## 91 92 93 94 97 99
## 0.019812479 0.294532104 0.395886334 0.294828918 0.092717449 0.149267659
## 100 101 102 103 106 108
## 0.520849541 0.842821542 0.302715220 0.075912847 0.248815539 0.441887107
## 109 110 111 112 115 117
## 0.118952316 0.113124992 0.649224311 0.732671939 0.753535897 0.403719101
## 118 119 120 121 124 126
## 0.171204671 0.131567633 0.063745790 0.904124153 0.361263381 0.539899585
## 127 128 129 130 133 135
## 0.505636749 0.210396729 0.216728978 0.144300564 0.700674299 0.056926025
## 136 137 138 139 142 144
## 0.265122576 0.105531759 0.069973129 0.280274691 0.373874066 0.441433071
## 145 146 147 148 151 153
## 0.512623458 0.006603844 0.082033655 0.288036146 0.390586492 0.868534951
## 154 155 156 157 160 162
## 0.571546801 0.972466976 0.913771400 0.088263079 0.978685766 0.345713208
## 163 164 165 166 169 171
## 0.264589239 0.115842530 0.300843308 0.253477958 0.261600842 0.189930127
## 172 173 174 175 178 180
## 0.574289160 0.164915867 0.207047623 0.054396680 0.847955007 0.696842570
## 181 182 183 184 187 189
## 0.057081781 0.279423888 0.002332152 0.056436000 0.844944498 0.280073922
## 190 191 192 193 196 198
## 0.414995591 0.087246700 0.533767202 0.725226098 0.739541772 0.113581012
## 199 200 201 202 205 207
## 0.362211100 0.407150734 0.187881660 0.488568033 0.358623448 0.955637368
## 208 209 210 211 214 216
## 0.753000424 0.104469527 0.900514551 0.053111375 0.560083207 0.921173409
## 217 218 219 220 223 225
## 0.337218030 0.368716777 0.186502509 0.421953344 0.382798190 0.072696697
## 226 227 228 229 232 234
## 0.073269297 0.129131040 0.798866224 0.958165552 0.706908217 0.401141610
## 235 236 237 238 241 243
## 0.050499812 0.895578042 0.900408572 0.868183341 0.063863033 0.314164239
## 244 245 246 247 250 252
## 0.445886468 0.528086041 0.939412426 0.525862055 0.107923768 0.203879251
## 253 254 255 256 259 261
## 0.040502657 0.088458566 0.337196964 0.231123055 0.665937377 0.788074117
## 262 263 264 265 268 270
## 0.631012131 0.180097643 0.495479624 0.336914196 0.638532060 0.477950908
## 271 272 273 274 277 279
## 0.780915058 0.081282675 0.143719069 0.047649860 0.205164278 0.270743991
## 280 281 282 283 286 288
## 0.114687313 0.517562057 0.641903113 0.445306702 0.469764314 0.499986940
## 289 290 291 292 295 297
## 0.063272415 0.302359835 0.068340469 0.245552903 0.389488441 0.270184604
## 298 299 300 301 304 306
## 0.170515157 0.572308015 0.343308330 0.791675975 0.684536070 0.343025636
## 307 308 309 310 313 315
## 0.709750756 0.132696045 0.349668375 0.343052867 0.382145573 0.572617782
## 316 317 318 319 322 324
## 0.209308445 0.042537223 0.739656966 0.299434027 0.186066612 0.793868789
## 325 326 327 328 331 333
## 0.211369604 0.269047813 0.329278581 0.915018580 0.479843816 0.933163699
## 334 335 336 337 340 342
## 0.254416854 0.046714166 0.779379436 0.483475005 0.916213533 0.076653523
## 343 344 345 346 349 351
## 0.004135527 0.379276268 0.428214385 0.623544850 0.058965155 0.267833176
## 352 353 354 355 358 360
## 0.381015223 0.055788732 0.069517004 0.275265585 0.924334692 0.875665233
## 361 362 363 364 367 369
## 0.825147390 0.663405864 0.354653194 0.762970765 0.308522487 0.043862642
## 370 371 372 373 376 378
## 0.251116966 0.909296201 0.029262512 0.097006669 0.842297974 0.136709443
## 379 380 381 382 385 387
## 0.870349799 0.271577152 0.169597213 0.039043383 0.111657782 0.377211200
## 388 389 390 391 394 396
## 0.527931935 0.527403814 0.215853310 0.126637386 0.136614173 0.380985087
## 397 398 399 400 403 405
## 0.128221781 0.281196242 0.034225722 0.853422733 0.490221350 0.750735625
## 406 407 408 409 412 414
## 0.521557285 0.247636868 0.047556648 0.933702695 0.196337719 0.222917665
## 415 416 417 418 421 423
## 0.369185228 0.608963911 0.124839792 0.688600605 0.433910664 0.232611699
## 424 425 426 427 430 432
## 0.204905747 0.856158224 0.813660507 0.007798057 0.108261157 0.111226500
## 433 434 435 436 439 441
## 0.053701387 0.225222249 0.093023844 0.701048208 0.026229830 0.759237527
## 442 443 444 445 448 450
## 0.086912395 0.261793148 0.433859083 0.262975463 0.111313669 0.151870282
## 451 452 453 454 457 459
## 0.024788855 0.309442248 0.131066285 0.295439292 0.384111429 0.872348182
## 460 461 462 463 466 468
## 0.578391808 0.322310779 0.020972075 0.230097520 0.098873566 0.163478560
## 469 470 471 472 475 477
## 0.536794201 0.874949963 0.642280302 0.291221305 0.182597439 0.185767097
## 478 479 480 481 484 486
## 0.191343810 0.312675224 0.372866143 0.596315148 0.078131440 0.454236881
## 487 488 489 490 493 495
## 0.461383554 0.919706991 0.096635369 0.905147973 0.170033830 0.006652967
## 496 497 498 499 502 504
## 0.705125967 0.166215507 0.066881863 0.822706935 0.139251607 0.315957431
## 505 506 507 508 511 513
## 0.206174420 0.182325597 0.711586621 0.246006686 0.260310667 0.157352136
## 514 515 516 517 520 522
## 0.081568701 0.077801595 0.579560569 0.701561466 0.189924248 0.267663865
## 523 524 525 526 529 531
## 0.034639127 0.705605181 0.284299343 0.047321527 0.149414187 0.261362008
## 532 533 534 535 538 540
## 0.401432587 0.245350836 0.271595562 0.132528323 0.021834812 0.485961362
## 541 542 543 544 547 549
## 0.468412045 0.325270301 0.448699687 0.141485669 0.964554598 0.598500355
## 550 551 552 553 556 558
## 0.781881273 0.117050085 0.101757244 0.265898632 0.233782665 0.299414355
## 559 560 561 562 565 567
## 0.713216452 0.224660215 0.457276748 0.888994394 0.093666547 0.195037517
## 568 569 570 571 574 576
## 0.163176928 0.555641657 0.203053162 0.086915399 0.134788448 0.292978973
## 577 578 579 580 583 585
## 0.233421368 0.496382287 0.507389743 0.915419003 0.506533259 0.341299232
## 586 587 588 589 592 594
## 0.047989573 0.694398253 0.129496563 0.846281163 0.249553863 0.158180373
## 595 596 597 598 601 603
## 0.453438128 0.708939280 0.119827408 0.094574477 0.091085434 0.150199689
## 604 605 606 607 610 612
## 0.784344928 0.837896411 0.355932974 0.873803762 0.063233157 0.765424529
## 613 614 615 616 619 621
## 0.862313206 0.334152923 0.800921867 0.100238878 0.532197133 0.232725561
## 622 623 624 625 628 630
## 0.144055062 0.967418521 0.195047181 0.139589317 0.261234536 0.067603324
## 631 632 633 634 637 639
## 0.351099812 0.109360968 0.125168543 0.122196078 0.175642259 0.489430421
## 640 641 642 643 646 648
## 0.032210226 0.093083608 0.401634124 0.540934795 0.548760759 0.778342630
## 649 650 651 652 655 657
## 0.545014556 0.074088670 0.045682315 0.243527019 0.127691800 0.048913795
## 658 659 660 661 664 666
## 0.470722376 0.705004389 0.170015880 0.750394404 0.810460221 0.160121669
## 667 668 669 670 673 675
## 0.579493155 0.318482323 0.265576651 0.693098719 0.154352952 0.391569935
## 676 677 678 679 682 684
## 0.881984074 0.618033772 0.114863536 0.354260569 0.845706152 0.362881744
## 685 686 687 688 691 693
## 0.067163962 0.313897680 0.182764516 0.117882162 0.264011635 0.471533059
## 694 695 696 697 700 702
## 0.654807529 0.045374260 0.368202061 0.584356838 0.676410789 0.385792100
## 703 704 705 706 709 711
## 0.778718256 0.632932650 0.131536921 0.193150716 0.810135165 0.453984902
## 712 713 714 715 718 720
## 0.354175639 0.814376453 0.149943891 0.116985723 0.220184045 0.281597279
## 721 722 723 724 727 729
## 0.076442387 0.232098852 0.382215938 0.422260187 0.233105554 0.430261004
## 730 731 732 733 736 738
## 0.087418171 0.253077461 0.295491893 0.868675992 0.207217632 0.135046025
## 739 740 741 742 745 747
## 0.184034264 0.247336855 0.811167649 0.172430243 0.948559439 0.756286809
## 748 749 750 751 754 756
## 0.330175105 0.846560203 0.588160984 0.572143388 0.721595361 0.460660709
## 757 758 759 760 763 765
## 0.542673735 0.324234478 0.194910185 0.915584505 0.104082374 0.354876790
## 766 767 768
## 0.196084536 0.279938025 0.080719632
# Cross-tabulate actual vs predicted class at a 0.3 probability cutoff
table(ActualValue = training$Outcome, PredictValue = res > 0.3)
## PredictValue
## ActualValue FALSE TRUE
## 0 218 101
## 1 34 160
# Accuracy = (TN + TP) / total, using the counts from the table above
((218+160)/(218+101+34+160))*100
## [1] 73.68421
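# caret (loaded above, but unused so far) computes the same accuracy
# plus sensitivity and specificity in one call; a sketch (output not
# shown):
pred_class <- factor(as.integer(res > 0.3), levels = c(0, 1))
confusionMatrix(pred_class, factor(training$Outcome, levels = c(0, 1)),
                positive = "1")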
library(ROCR)
## Warning: package 'ROCR' was built under R version 3.5.2
## Loading required package: gplots
##
## Attaching package: 'gplots'
## The following object is masked from 'package:stats':
##
## lowess
ROCRPred <- prediction(res, training$Outcome)
ROCRPerf <- performance(ROCRPred, "tpr", "fpr")
plot(ROCRPerf, colorize = TRUE, print.cutoffs.at = seq(0.1, 1, by = 0.1))
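# A single threshold-free summary of the curve above is the area under
# it (AUC; 1 = perfect ranking, 0.5 = chance). A sketch reusing the
# ROCR prediction object (output not shown):
ROCRAuc <- performance(ROCRPred, "auc")
ROCRAuc@y.values[[1]]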

# Predicted probabilities on the held-out testing set
res <- predict(model, testing, type = "response")
res
## 5 6 8 14 15 17
## 0.87869398 0.17248013 0.65348901 0.61040787 0.63436330 0.41461396
## 23 24 26 32 33 35
## 0.95525983 0.35956642 0.50113402 0.60076359 0.05409353 0.47985928
## 41 42 44 50 51 53
## 0.77296843 0.74982471 0.94765744 0.03151430 0.03992269 0.07534466
## 59 60 62 68 69 71
## 0.82646254 0.20876323 0.57947635 0.49869483 0.03337704 0.20272779
## 77 78 80 86 87 89
## 0.09244337 0.26401803 0.10939160 0.20979316 0.60441289 0.84073601
## 95 96 98 104 105 107
## 0.28028360 0.56200343 0.01701518 0.03562872 0.24929276 0.02649658
## 113 114 116 122 123 125
## 0.06328195 0.10874915 0.57530949 0.33143894 0.17224618 0.15718822
## 131 132 134 140 141 143
## 0.66547809 0.70949427 0.32029768 0.21571207 0.16872082 0.18528524
## 149 150 152 158 159 161
## 0.65046305 0.05265367 0.14244102 0.12224403 0.05858985 0.47621374
## 167 168 170 176 177 179
## 0.49182176 0.33678187 0.14618105 0.89179391 0.14815404 0.79532260
## 185 186 188 194 195 197
## 0.37606893 0.95507626 0.39212450 0.97284013 0.10825615 0.06846334
## 203 204 206 212 213 215
## 0.13606170 0.04270391 0.14820989 0.60667094 0.86245990 0.38189532
## 221 222 224 230 231 233
## 0.71470574 0.68216148 0.63422851 0.34395990 0.75742831 0.03475425
## 239 240 242 248 249 251
## 0.83749529 0.04195724 0.13947379 0.76349367 0.47428682 0.41344454
## 257 258 260 266 267 269
## 0.24092038 0.13489647 0.91398582 0.30951915 0.67349281 0.05629347
## 275 276 278 284 285 287
## 0.58471077 0.30418040 0.08288840 0.68826792 0.12633104 0.67172831
## 293 294 296 302 303 305
## 0.67221347 0.49731452 0.75096135 0.42024962 0.11156517 0.26099902
## 311 312 314 320 321 323
## 0.08612392 0.23101023 0.23640971 0.78789137 0.28135009 0.15543166
## 329 330 332 338 339 341
## 0.27521357 0.20512151 0.08810176 0.30341032 0.81119007 0.17327243
## 347 348 350 356 357 359
## 0.35683982 0.18608518 0.01719839 0.79152834 0.38624996 0.41018258
## 365 366 368 374 375 377
## 0.51954118 0.26165350 0.03880488 0.18334813 0.43139952 0.04212842
## 383 384 386 392 393 395
## 0.12254423 0.09647563 0.09224376 0.89813034 0.10696624 0.71767958
## 401 402 404 410 411 413
## 0.14511200 0.37830871 0.13445600 0.76964237 0.32586390 0.64649458
## 419 420 422 428 429 431
## 0.02565334 0.20257820 0.07163632 0.73523274 0.37689488 0.08269106
## 437 438 440 446 447 449
## 0.81032430 0.52403647 0.41221836 0.99274022 0.08014999 0.14759177
## 455 456 458 464 465 467
## 0.23951697 0.94030981 0.10534922 0.09261794 0.36857300 0.03162532
## 473 474 476 482 483 485
## 0.29327608 0.47391455 0.23856335 0.22091676 0.08017293 0.82265608
## 491 492 494 500 501 503
## 0.14256914 0.10495767 0.43510871 0.65829806 0.09767014 0.02575970
## 509 510 512 518 519 521
## 0.11939094 0.35842264 0.11604603 0.59056350 0.28384594 0.02090463
## 527 528 530 536 537 539
## 0.02844639 0.11437967 0.11025344 0.55540173 0.09299400 0.32147412
## 545 546 548 554 555 557
## 0.07481840 0.88315492 0.34060354 0.07256517 0.11597943 0.17258636
## 563 564 566 572 573 575
## 0.12482608 0.17173486 0.09601913 0.11689798 0.18882386 0.32020481
## 581 582 584 590 591 593
## 0.60579046 0.16395176 0.41120248 0.02929624 0.85746105 0.44986692
## 599 600 602 608 609 611
## 0.71499685 0.08992480 0.15730227 0.03465496 0.53479531 0.15571308
## 617 618 620 626 627 629
## 0.20976474 0.01558273 0.27317656 0.17700780 0.11052023 0.44061294
## 635 636 638 644 645 647
## 0.17100437 0.50123663 0.11973904 0.19933506 0.13302355 0.37951194
## 653 654 656 662 663 665
## 0.36265369 0.20020713 0.54397494 0.96495172 0.79367437 0.39491655
## 671 672 674 680 681 683
## 0.81210549 0.08187466 0.79990212 0.07215223 0.01620047 0.23385814
## 689 690 692 698 699 701
## 0.23105035 0.68758889 0.92259665 0.08978050 0.38390140 0.29699729
## 707 708 710 716 717 719
## 0.06785261 0.26694443 0.19769073 0.91070142 0.79018051 0.19104081
## 725 726 728 734 735 737
## 0.16695212 0.38632507 0.32441692 0.12448219 0.11004882 0.13725387
## 743 744 746 752 753 755
## 0.10271575 0.71832841 0.37179934 0.31460647 0.11686034 0.76137779
## 761 762 764
## 0.09918074 0.95057921 0.35268100
testing
## Pregnancies Glucose BloodPressure SkinThickness Insulin BMI
## 5 0 137 40 35 168 43.1
## 6 5 116 74 0 0 25.6
## 8 10 115 0 0 0 35.3
## 14 1 189 60 23 846 30.1
## 15 5 166 72 19 175 25.8
## 17 0 118 84 47 230 45.8
## 23 7 196 90 0 0 39.8
## 24 9 119 80 35 0 29.0
## 26 10 125 70 26 115 31.1
## 32 3 158 76 36 245 31.6
## 33 3 88 58 11 54 24.8
## 35 10 122 78 31 0 27.6
## 41 3 180 64 25 70 34.0
## 42 7 133 84 0 0 40.2
## 44 9 171 110 24 240 45.4
## 50 7 105 0 0 0 0.0
## 51 1 103 80 11 82 19.4
## 53 5 88 66 21 23 24.4
## 59 0 146 82 0 0 40.5
## 60 0 105 64 41 142 41.5
## 62 8 133 72 0 0 32.9
## 68 2 109 92 0 0 42.7
## 69 1 95 66 13 38 19.6
## 71 2 100 66 20 90 32.9
## 77 7 62 78 0 0 32.6
## 78 5 95 72 33 0 37.7
## 80 2 112 66 22 0 25.0
## 86 2 110 74 29 125 32.4
## 87 13 106 72 54 0 36.6
## 89 15 136 70 32 110 37.1
## 95 2 142 82 18 64 24.7
## 96 6 144 72 27 228 33.9
## 98 1 71 48 18 76 20.4
## 104 1 81 72 18 40 26.6
## 105 2 85 65 0 0 39.6
## 107 1 96 122 0 0 22.4
## 113 1 89 76 34 37 31.2
## 114 4 76 62 0 0 34.0
## 116 4 146 92 0 0 31.2
## 122 6 111 64 39 0 34.2
## 123 2 107 74 30 100 33.6
## 125 0 113 76 0 0 33.3
## 131 4 173 70 14 168 29.7
## 132 9 122 56 0 0 33.3
## 134 8 84 74 31 0 38.3
## 140 5 105 72 29 325 36.9
## 141 3 128 78 0 0 21.1
## 143 2 108 52 26 63 32.5
## 149 5 147 78 0 0 33.7
## 150 2 90 70 17 0 27.3
## 152 4 114 65 0 0 21.9
## 158 1 109 56 21 135 25.2
## 159 2 88 74 19 53 29.0
## 161 4 151 90 38 0 29.7
## 167 3 148 66 25 0 32.5
## 168 4 120 68 0 0 29.6
## 170 3 111 90 12 78 28.4
## 176 8 179 72 42 130 32.7
## 177 6 85 78 0 0 31.2
## 179 5 143 78 0 0 45.0
## 185 4 141 74 0 0 27.6
## 186 7 194 68 28 0 35.9
## 188 1 128 98 41 58 32.0
## 194 11 135 0 0 0 52.3
## 195 8 85 55 20 0 24.4
## 197 1 105 58 0 0 24.3
## 203 0 108 68 20 0 27.3
## 204 2 99 70 16 44 20.4
## 206 5 111 72 28 0 23.9
## 212 0 147 85 54 0 42.8
## 213 7 179 95 31 0 34.2
## 215 9 112 82 32 175 34.2
## 221 0 177 60 29 478 34.6
## 222 2 158 90 0 0 31.6
## 224 7 142 60 33 190 28.8
## 230 0 117 80 31 53 45.2
## 231 4 142 86 0 0 44.0
## 233 1 79 80 25 37 25.4
## 239 9 164 84 21 0 30.8
## 240 0 104 76 0 0 18.4
## 242 4 91 70 32 88 33.1
## 248 0 165 90 33 680 52.3
## 249 9 124 70 33 402 35.4
## 251 9 106 52 0 0 31.2
## 257 3 111 56 39 0 30.1
## 258 2 114 68 22 0 28.7
## 260 11 155 76 28 150 33.3
## 266 5 96 74 18 67 33.6
## 267 0 138 0 0 0 36.3
## 269 0 102 52 0 0 25.1
## 275 13 106 70 0 0 34.2
## 276 2 100 70 52 57 40.5
## 278 0 104 64 23 116 27.8
## 284 7 161 86 0 0 30.4
## 285 2 108 80 0 0 27.0
## 287 5 155 84 44 545 38.7
## 293 2 128 78 37 182 43.3
## 294 1 128 48 45 194 40.5
## 296 6 151 62 31 120 35.5
## 302 2 144 58 33 135 31.6
## 303 5 77 82 41 42 35.8
## 305 3 150 76 0 0 21.0
## 311 6 80 66 30 0 26.2
## 312 0 106 70 37 148 39.4
## 314 3 113 50 10 85 29.5
## 320 6 194 78 0 0 23.5
## 321 4 129 60 12 231 27.5
## 323 0 124 70 20 0 27.4
## 329 2 102 86 36 120 45.5
## 330 6 105 70 32 68 30.8
## 332 2 87 58 16 52 32.7
## 338 5 115 76 0 0 31.2
## 339 9 152 78 34 171 34.2
## 341 1 130 70 13 105 25.9
## 347 1 139 46 19 83 28.7
## 348 3 116 0 0 0 23.5
## 350 5 0 80 32 0 41.0
## 356 9 165 88 0 0 30.4
## 357 1 125 50 40 167 33.3
## 359 12 88 74 40 54 35.3
## 365 4 147 74 25 293 34.9
## 366 5 99 54 28 83 34.0
## 368 0 101 64 17 0 21.0
## 374 2 105 58 40 94 34.9
## 375 2 122 52 43 158 36.2
## 377 0 98 82 15 84 25.2
## 383 1 109 60 8 182 25.4
## 384 1 90 62 18 59 25.1
## 386 1 119 54 13 50 22.3
## 392 5 166 76 0 0 45.7
## 393 1 131 64 14 415 23.7
## 395 4 158 78 0 0 32.9
## 401 4 95 64 0 0 32.0
## 402 6 137 61 0 0 24.2
## 404 9 72 78 25 0 31.6
## 410 1 172 68 49 579 42.4
## 411 6 102 90 39 0 35.7
## 413 1 143 84 23 310 42.4
## 419 1 83 68 0 0 18.2
## 420 3 129 64 29 115 26.4
## 422 2 94 68 18 76 26.0
## 428 1 181 64 30 180 34.1
## 429 0 135 94 46 145 40.6
## 431 2 99 0 0 0 22.2
## 437 12 140 85 33 0 37.4
## 438 5 147 75 0 0 29.9
## 440 6 107 88 0 0 36.8
## 446 0 180 78 63 14 59.4
## 447 1 100 72 12 70 25.3
## 449 0 104 64 37 64 33.6
## 455 2 100 54 28 105 37.8
## 456 14 175 62 30 0 33.6
## 458 5 86 68 28 71 30.2
## 464 5 88 78 30 0 27.6
## 465 10 115 98 0 0 24.0
## 467 0 74 52 10 36 27.8
## 473 0 119 66 27 0 38.8
## 474 7 136 90 0 0 29.9
## 476 0 137 84 27 0 27.3
## 482 0 123 88 37 0 35.2
## 483 4 85 58 22 49 27.8
## 485 0 145 0 0 0 44.2
## 491 2 83 65 28 66 36.8
## 492 2 89 90 30 0 33.5
## 494 4 125 70 18 122 28.9
## 500 6 154 74 32 193 29.3
## 501 2 117 90 19 71 25.2
## 503 6 0 68 41 0 39.0
## 509 2 84 50 23 76 30.4
## 510 8 120 78 0 0 25.0
## 512 0 139 62 17 210 22.1
## 518 7 125 86 0 0 37.6
## 519 13 76 60 0 0 32.8
## 521 2 68 70 32 66 25.0
## 527 1 97 64 19 82 18.2
## 528 3 116 74 15 105 26.3
## 530 0 111 65 0 0 24.6
## 536 4 132 0 0 0 32.9
## 537 0 105 90 0 0 29.6
## 539 0 127 80 37 210 36.3
## 545 1 88 78 29 76 32.0
## 546 8 186 90 35 225 34.5
## 548 4 131 68 21 166 33.1
## 554 1 88 62 24 44 29.9
## 555 1 84 64 23 115 36.9
## 557 1 97 70 40 0 38.1
## 563 1 87 68 34 77 37.6
## 564 6 99 60 19 54 26.9
## 566 2 95 54 14 88 26.1
## 572 2 130 96 0 0 22.6
## 573 3 111 58 31 44 29.5
## 575 1 143 86 30 330 30.1
## 581 0 151 90 46 0 42.1
## 582 6 109 60 27 0 25.0
## 584 8 100 76 0 0 38.7
## 590 0 73 0 0 0 21.1
## 591 11 111 84 40 0 46.8
## 593 3 132 80 0 0 34.4
## 599 1 173 74 0 0 36.8
## 600 1 109 38 18 120 23.1
## 602 6 96 0 0 0 23.7
## 608 1 92 62 25 41 19.5
## 609 0 152 82 39 272 41.5
## 611 3 106 54 21 158 30.9
## 617 6 117 96 0 0 28.7
## 618 2 68 62 13 15 20.1
## 620 0 119 0 0 0 32.4
## 626 4 90 88 47 54 37.7
## 627 0 125 68 0 0 24.7
## 629 5 128 80 0 0 34.6
## 635 10 92 62 0 0 25.9
## 636 13 104 72 0 0 31.2
## 638 2 94 76 18 66 31.6
## 644 4 90 0 0 0 28.0
## 645 3 103 72 30 152 27.6
## 647 1 167 74 17 144 23.4
## 653 5 123 74 40 77 34.1
## 654 2 120 54 0 0 26.8
## 656 2 155 52 27 540 38.7
## 662 1 199 76 43 0 42.9
## 663 8 167 106 46 231 37.6
## 665 6 115 60 39 0 33.7
## 671 6 165 68 26 168 33.6
## 672 1 99 58 10 0 25.4
## 674 3 123 100 35 240 57.3
## 680 2 101 58 17 265 24.2
## 681 2 56 56 28 45 24.2
## 683 0 95 64 39 105 44.6
## 689 1 140 74 26 180 24.1
## 690 1 144 82 46 180 46.1
## 692 13 158 114 0 0 42.3
## 698 0 99 0 0 0 25.0
## 699 4 127 88 11 155 34.5
## 701 2 122 76 27 200 35.9
## 707 10 115 0 0 0 0.0
## 708 2 127 46 21 335 34.4
## 710 2 93 64 32 160 38.0
## 716 7 187 50 33 392 33.9
## 717 3 173 78 39 185 33.8
## 719 1 108 60 46 178 35.5
## 725 1 111 94 0 0 32.8
## 726 4 112 78 40 0 39.4
## 728 0 141 84 26 0 32.4
## 734 2 106 56 27 165 29.0
## 735 2 105 75 0 0 23.3
## 737 0 126 86 27 120 27.4
## 743 1 109 58 18 116 28.5
## 744 9 140 94 0 0 32.7
## 746 12 100 84 33 105 30.0
## 752 1 121 78 39 74 39.0
## 753 3 108 62 24 0 26.0
## 755 8 154 78 32 0 32.4
## 761 2 88 58 26 16 28.4
## 762 9 170 74 31 0 44.0
## 764 10 101 76 48 180 32.9
## DiabetesPedigreeFunction Age Outcome
## 5 2.288 33 1
## 6 0.201 30 0
## 8 0.134 29 0
## 14 0.398 59 1
## 15 0.587 51 1
## 17 0.551 31 1
## 23 0.451 41 1
## 24 0.263 29 1
## 26 0.205 41 1
## 32 0.851 28 1
## 33 0.267 22 0
## 35 0.512 45 0
## 41 0.271 26 0
## 42 0.696 37 0
## 44 0.721 54 1
## 50 0.305 24 0
## 51 0.491 22 0
## 53 0.342 30 0
## 59 1.781 44 0
## 60 0.173 22 0
## 62 0.270 39 1
## 68 0.845 54 0
## 69 0.334 25 0
## 71 0.867 28 1
## 77 0.391 41 0
## 78 0.370 27 0
## 80 0.307 24 0
## 86 0.698 27 0
## 87 0.178 45 0
## 89 0.153 43 1
## 95 0.761 21 0
## 96 0.255 40 0
## 98 0.323 22 0
## 104 0.283 24 0
## 105 0.930 27 0
## 107 0.207 27 0
## 113 0.192 23 0
## 114 0.391 25 0
## 116 0.539 61 1
## 122 0.260 24 0
## 123 0.404 23 0
## 125 0.278 23 1
## 131 0.361 33 1
## 132 1.114 33 1
## 134 0.457 39 0
## 140 0.159 28 0
## 141 0.268 55 0
## 143 0.318 22 0
## 149 0.218 65 0
## 150 0.085 22 0
## 152 0.432 37 0
## 158 0.833 23 0
## 159 0.229 22 0
## 161 0.294 36 0
## 167 0.256 22 0
## 168 0.709 34 0
## 170 0.495 29 0
## 176 0.719 36 1
## 177 0.382 42 0
## 179 0.190 47 0
## 185 0.244 40 0
## 186 0.745 41 1
## 188 1.321 33 1
## 194 0.578 40 1
## 195 0.136 42 0
## 197 0.187 21 0
## 203 0.787 32 0
## 204 0.235 27 0
## 206 0.407 27 0
## 212 0.375 24 0
## 213 0.164 60 0
## 215 0.260 36 1
## 221 1.072 21 1
## 222 0.805 66 1
## 224 0.687 61 0
## 230 0.089 24 0
## 231 0.645 22 1
## 233 0.583 22 0
## 239 0.831 32 1
## 240 0.582 27 0
## 242 0.446 22 0
## 248 0.427 23 0
## 249 0.282 34 0
## 251 0.380 42 0
## 257 0.557 30 0
## 258 0.092 25 0
## 260 1.353 51 1
## 266 0.997 43 0
## 267 0.933 25 1
## 269 0.078 21 0
## 275 0.251 52 0
## 276 0.677 25 0
## 278 0.454 23 0
## 284 0.165 47 1
## 285 0.259 52 1
## 287 0.619 34 0
## 293 1.224 31 1
## 294 0.613 24 1
## 296 0.692 28 0
## 302 0.422 25 1
## 303 0.156 35 0
## 305 0.207 37 0
## 311 0.313 41 0
## 312 0.605 22 0
## 314 0.626 25 0
## 320 0.129 59 1
## 321 0.527 31 0
## 323 0.254 36 1
## 329 0.127 23 1
## 330 0.122 37 0
## 332 0.166 25 0
## 338 0.343 44 1
## 339 0.893 33 1
## 341 0.472 22 0
## 347 0.654 22 0
## 348 0.187 23 0
## 350 0.346 37 1
## 356 0.302 49 1
## 357 0.962 28 1
## 359 0.378 48 0
## 365 0.385 30 0
## 366 0.499 30 0
## 368 0.252 21 0
## 374 0.225 25 0
## 375 0.816 28 0
## 377 0.299 22 0
## 383 0.947 21 0
## 384 1.268 25 0
## 386 0.205 24 0
## 392 0.340 27 1
## 393 0.389 21 0
## 395 0.803 31 1
## 401 0.161 31 1
## 402 0.151 55 0
## 404 0.280 38 0
## 410 0.702 28 1
## 411 0.674 28 0
## 413 1.076 22 0
## 419 0.624 27 0
## 420 0.219 28 1
## 422 0.561 21 0
## 428 0.328 38 1
## 429 0.284 26 0
## 431 0.108 23 0
## 437 0.244 41 0
## 438 0.434 28 0
## 440 0.727 31 0
## 446 2.420 25 1
## 447 0.658 28 0
## 449 0.510 22 1
## 455 0.498 24 0
## 456 0.212 38 1
## 458 0.364 24 0
## 464 0.258 37 0
## 465 1.022 34 0
## 467 0.269 22 0
## 473 0.259 22 0
## 474 0.210 50 0
## 476 0.231 59 0
## 482 0.197 29 0
## 483 0.306 28 0
## 485 0.630 31 1
## 491 0.629 24 0
## 492 0.292 42 0
## 494 1.144 45 1
## 500 0.839 39 0
## 501 0.313 21 0
## 503 0.727 41 1
## 509 0.968 21 0
## 510 0.409 64 0
## 512 0.207 21 0
## 518 0.304 51 0
## 519 0.180 41 0
## 521 0.187 25 0
## 527 0.299 21 0
## 528 0.107 24 0
## 530 0.660 31 0
## 536 0.302 23 1
## 537 0.197 46 0
## 539 0.804 23 0
## 545 0.365 29 0
## 546 0.423 37 1
## 548 0.160 28 0
## 554 0.422 23 0
## 555 0.471 28 0
## 557 0.218 30 0
## 563 0.401 24 0
## 564 0.497 32 0
## 566 0.748 22 0
## 572 0.268 21 0
## 573 0.430 22 0
## 575 0.892 23 0
## 581 0.371 21 1
## 582 0.206 27 0
## 584 0.190 42 0
## 590 0.342 25 0
## 591 0.925 45 1
## 593 0.402 44 1
## 599 0.088 38 1
## 600 0.407 26 0
## 602 0.190 28 0
## 608 0.482 25 0
## 609 0.270 27 0
## 611 0.292 24 0
## 617 0.157 30 0
## 618 0.257 23 0
## 620 0.141 24 1
## 626 0.362 29 0
## 627 0.206 21 0
## 629 0.144 45 0
## 635 0.167 31 0
## 636 0.465 38 1
## 638 0.649 23 0
## 644 0.610 31 0
## 645 0.730 27 0
## 647 0.447 33 1
## 653 0.269 28 0
## 654 0.455 27 0
## 656 0.240 25 1
## 662 1.394 22 1
## 663 0.165 43 1
## 665 0.245 40 1
## 671 0.631 49 0
## 672 0.551 21 0
## 674 0.880 22 0
## 680 0.614 23 0
## 681 0.332 22 0
## 683 0.366 22 0
## 689 0.828 23 0
## 690 0.335 46 1
## 692 0.257 44 1
## 698 0.253 22 0
## 699 0.598 28 0
## 701 0.483 26 0
## 707 0.261 30 1
## 708 0.176 22 0
## 710 0.674 23 1
## 716 0.826 34 1
## 717 0.970 31 1
## 719 0.415 24 0
## 725 0.265 45 0
## 726 0.236 38 0
## 728 0.433 22 0
## 734 0.426 22 0
## 735 0.560 53 0
## 737 0.515 21 0
## 743 0.219 22 0
## 744 0.734 45 1
## 746 0.488 46 0
## 752 0.261 28 0
## 753 0.223 25 0
## 755 0.443 45 1
## 761 0.766 22 0
## 762 0.403 43 1
## 764 0.171 63 0
# Same 0.3 cutoff applied to the testing set
table(ActualValue = testing$Outcome, PredictValue = res > 0.3)
## PredictValue
## ActualValue FALSE TRUE
## 0 122 59
## 1 13 61
((122+61)/(122+59+13+61))*100
## [1] 71.76471
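# Accuracy alone hides the error types; from the testing table above,
# sensitivity and specificity at the 0.3 cutoff can be computed as a
# sketch (output not shown):
tab <- table(ActualValue = testing$Outcome, PredictValue = res > 0.3)
tab[2, 2] / sum(tab[2, ])   # sensitivity: TP / (TP + FN) = 61/74
tab[1, 1] / sum(tab[1, ])   # specificity: TN / (TN + FP) = 122/181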
# 73.68% is the training accuracy of this model
# 71.76% is the testing accuracy of this model
# The two are close, so the model generalizes reasonably well and we can use it.
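# To score a new patient, predict() takes a data frame with the same
# predictor columns; a hypothetical example (values invented purely for
# illustration):
new_patient <- data.frame(Pregnancies = 2, Glucose = 150, BloodPressure = 70,
                          Insulin = 100, BMI = 32.0,
                          DiabetesPedigreeFunction = 0.5, Age = 35)
predict(model, new_patient, type = "response")   # predicted probability of diabetes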