# Packages
library(ggplot2)
library(tidyverse)
## ── Attaching core tidyverse packages ──────────────────────── tidyverse 2.0.0 ──
## ✔ dplyr 1.1.2 ✔ readr 2.1.4
## ✔ forcats 1.0.0 ✔ stringr 1.5.0
## ✔ lubridate 1.9.2 ✔ tibble 3.2.1
## ✔ purrr 1.0.1 ✔ tidyr 1.3.0
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## ✖ dplyr::filter() masks stats::filter()
## ✖ dplyr::lag() masks stats::lag()
## ℹ Use the conflicted package (<http://conflicted.r-lib.org/>) to force all conflicts to become errors
library(gmodels)
library(ggmosaic)
library(corrplot)
## corrplot 0.92 loaded
library(caret)
## Loading required package: lattice
##
## Attaching package: 'caret'
##
## The following object is masked from 'package:purrr':
##
## lift
library(rpart)
library(rpart.plot)
#library(C50)
library(tidyr)
library(dplyr)
#library(knitr)
library(rsample)
library(readxl)
library(MASS)
##
## Attaching package: 'MASS'
##
## The following object is masked from 'package:dplyr':
##
## select
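# Note: MASS::select() masks dplyr::select(). Two common workarounds (a sketch;
# the second assumes the conflicted package is installed):
# dplyr::select(data_pendapatan, Usia, Pendapatan)  # qualify the call explicitly
# conflicted::conflicts_prefer(dplyr::select)       # or set a session-wide preference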
# Input: read the income data from Excel
data_pendapatan <- read_xlsx("C:/Users/user/Downloads/Data Latihan.xlsx", sheet = "Pendapatan")
head(data_pendapatan)
## # A tibble: 6 × 7
## Pendidikan Klasifikasi.Daerah Jenis.Kelamin Status.KRT Usia Status.Perkawinan
## <dbl> <dbl> <dbl> <dbl> <dbl> <dbl>
## 1 0 0 1 1 49 1
## 2 0 0 1 1 47 1
## 3 0 0 1 1 61 1
## 4 1 0 0 0 23 0
## 5 0 0 1 1 27 1
## 6 0 0 1 1 50 1
## # ℹ 1 more variable: Pendapatan <dbl>
# Convert categorical variables to factors
data_pendapatan$Pendidikan<- as.factor(data_pendapatan$Pendidikan)
data_pendapatan$Klasifikasi.Daerah<- as.factor(data_pendapatan$Klasifikasi.Daerah)
data_pendapatan$Jenis.Kelamin<-as.factor(data_pendapatan$Jenis.Kelamin)
data_pendapatan$Status.KRT<-as.factor(data_pendapatan$Status.KRT)
data_pendapatan$Status.Perkawinan<-as.factor(data_pendapatan$Status.Perkawinan)
data_pendapatan$Pendapatan<-as.factor(data_pendapatan$Pendapatan)
str(data_pendapatan$Pendapatan)
## Factor w/ 4 levels "1","2","3","4": 2 2 1 2 2 2 4 1 1 1 ...
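# A more compact alternative to the repeated as.factor() calls above, as a
# sketch using dplyr::mutate() with across():
# data_pendapatan <- data_pendapatan %>%
#   mutate(across(c(Pendidikan, Klasifikasi.Daerah, Jenis.Kelamin,
#                   Status.KRT, Status.Perkawinan, Pendapatan), as.factor))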
library(arsenal)
##
## Attaching package: 'arsenal'
## The following object is masked from 'package:lubridate':
##
## is.Date
tab<-tableby(Pendapatan~ .,data=data_pendapatan)
summary(tab,text=TRUE)
##
##
## | | 1 (N=279) | 2 (N=568) | 3 (N=133) | 4 (N=20) | Total (N=1000) | p value|
## |:------------------|:---------------:|:---------------:|:---------------:|:---------------:|:---------------:|-------:|
## |Pendidikan | | | | | | < 0.001|
## |- 0 | 268 (96.1%) | 524 (92.3%) | 74 (55.6%) | 8 (40.0%) | 874 (87.4%) | |
## |- 1 | 11 (3.9%) | 44 (7.7%) | 59 (44.4%) | 12 (60.0%) | 126 (12.6%) | |
## |Klasifikasi.Daerah | | | | | | < 0.001|
## |- 0 | 66 (23.7%) | 128 (22.5%) | 2 (1.5%) | 0 (0.0%) | 196 (19.6%) | |
## |- 1 | 213 (76.3%) | 440 (77.5%) | 131 (98.5%) | 20 (100.0%) | 804 (80.4%) | |
## |Jenis.Kelamin | | | | | | < 0.001|
## |- 0 | 132 (47.3%) | 128 (22.5%) | 45 (33.8%) | 4 (20.0%) | 309 (30.9%) | |
## |- 1 | 147 (52.7%) | 440 (77.5%) | 88 (66.2%) | 16 (80.0%) | 691 (69.1%) | |
## |Status.KRT | | | | | | < 0.001|
## |- 0 | 141 (50.5%) | 208 (36.6%) | 54 (40.6%) | 4 (20.0%) | 407 (40.7%) | |
## |- 1 | 138 (49.5%) | 360 (63.4%) | 79 (59.4%) | 16 (80.0%) | 593 (59.3%) | |
## |Usia | | | | | | < 0.001|
## |- Mean (SD) | 42.957 (14.304) | 38.165 (12.304) | 39.910 (10.340) | 46.000 (5.554) | 39.891 (12.753) | |
## |- Range | 15.000 - 80.000 | 14.000 - 78.000 | 19.000 - 66.000 | 37.000 - 56.000 | 14.000 - 80.000 | |
## |Status.Perkawinan | | | | | | 0.007|
## |- 0 | 91 (32.6%) | 138 (24.3%) | 32 (24.1%) | 1 (5.0%) | 262 (26.2%) | |
## |- 1 | 188 (67.4%) | 430 (75.7%) | 101 (75.9%) | 19 (95.0%) | 738 (73.8%) | |
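# ggmosaic is attached above but not used yet; a minimal sketch of a mosaic
# plot to visualise the association between education and income class:
# ggplot(data = data_pendapatan) +
#   geom_mosaic(aes(x = product(Pendidikan), fill = Pendapatan)) +
#   labs(x = "Pendidikan", y = "Proportion")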
# Model building
# Data partitioning
set.seed(123)
acak<-createDataPartition(data_pendapatan$Pendapatan,p=0.8, list=FALSE)
data_train<-data_pendapatan[acak, ]
data_test<-data_pendapatan[-acak,]
summary(data_train)
## Pendidikan Klasifikasi.Daerah Jenis.Kelamin Status.KRT Usia
## 0:700 0:164 0:243 0:329 Min. :15.00
## 1:102 1:638 1:559 1:473 1st Qu.:30.00
## Median :39.00
## Mean :39.57
## 3rd Qu.:48.00
## Max. :80.00
## Status.Perkawinan Pendapatan
## 0:219 1:224
## 1:583 2:455
## 3:107
## 4: 16
##
##
summary(data_test)
## Pendidikan Klasifikasi.Daerah Jenis.Kelamin Status.KRT Usia
## 0:174 0: 32 0: 66 0: 78 Min. :14.00
## 1: 24 1:166 1:132 1:120 1st Qu.:32.00
## Median :40.00
## Mean :41.18
## 3rd Qu.:49.75
## Max. :78.00
## Status.Perkawinan Pendapatan
## 0: 43 1: 55
## 1:155 2:113
## 3: 26
## 4: 4
##
##
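# Sanity check (sketch): createDataPartition() performs stratified sampling,
# so the income class proportions should be similar in the two splits:
# round(prop.table(table(data_train$Pendapatan)), 3)
# round(prop.table(table(data_test$Pendapatan)), 3)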
# Fit the model: ordinal logistic regression (proportional odds)
model_pendapatan <- polr(Pendapatan ~ ., method = "logistic", data = data_train, Hess = TRUE)
summary(model_pendapatan)
## Call:
## polr(formula = Pendapatan ~ ., data = data_train, Hess = T, method = "logistic")
##
## Coefficients:
## Value Std. Error t value
## Pendidikan1 2.52138 0.237066 10.636
## Klasifikasi.Daerah1 0.56414 0.175636 3.212
## Jenis.Kelamin1 0.54822 0.199061 2.754
## Status.KRT1 0.64226 0.212620 3.021
## Usia -0.03221 0.006865 -4.692
## Status.Perkawinan1 0.53006 0.180398 2.938
##
## Intercepts:
## Value Std. Error t value
## 1|2 -0.5217 0.3040 -1.7161
## 2|3 2.6805 0.3220 8.3249
## 3|4 5.1927 0.4173 12.4437
##
## Residual Deviance: 1454.495
## AIC: 1472.495
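# polr() assumes proportional odds across the response categories. A sketch of
# one common check, the Brant test (assumes the brant package is installed):
# brant::brant(model_pendapatan)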
# Multicollinearity check
library(car)
## Loading required package: carData
##
## Attaching package: 'car'
## The following object is masked from 'package:dplyr':
##
## recode
## The following object is masked from 'package:purrr':
##
## some
vif(model_pendapatan)
## Pendidikan Klasifikasi.Daerah Jenis.Kelamin Status.KRT
## 1.045494 1.026166 1.637565 2.146962
## Usia Status.Perkawinan
## 1.467146 1.279745
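# All VIF values are close to 1, well below the common cutoff of 5, so
# multicollinearity is not a concern; a quick programmatic check (sketch):
# any(vif(model_pendapatan) > 5)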
# Goodness-of-fit test
library(generalhoslem)
## Loading required package: reshape
##
## Attaching package: 'reshape'
## The following object is masked from 'package:lubridate':
##
## stamp
## The following object is masked from 'package:dplyr':
##
## rename
## The following objects are masked from 'package:tidyr':
##
## expand, smiths
lipsitz.test(model_pendapatan)
##
## Lipsitz goodness of fit test for ordinal response models
##
## data: formula: Pendapatan ~ Pendidikan + Klasifikasi.Daerah + Jenis.Kelamin + Status.KRT + Usia + Status.Perkawinan
## LR statistic = 25.252, df = 9, p-value = 0.002705
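# The small p-value (0.0027) indicates some lack of fit. generalhoslem also
# provides the Pulkstenis-Robinson tests for models with categorical
# covariates; a sketch on the same fitted model:
# pulkrob.chisq(model_pendapatan, catvars = c("Pendidikan", "Klasifikasi.Daerah",
#                                             "Jenis.Kelamin", "Status.KRT",
#                                             "Status.Perkawinan"))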
# Partial (per-coefficient) significance tests
koefisien <- coef(summary(model_pendapatan))
# Compute two-sided p-values from the t values
p <- pnorm(abs(koefisien[, "t value"]), lower.tail = FALSE) * 2
(ctabel <- cbind(round(koefisien, 4), "p-value" = round(p, 4)))
## Value Std. Error t value p-value
## Pendidikan1 2.5214 0.2371 10.6358 0.0000
## Klasifikasi.Daerah1 0.5641 0.1756 3.2120 0.0013
## Jenis.Kelamin1 0.5482 0.1991 2.7540 0.0059
## Status.KRT1 0.6423 0.2126 3.0207 0.0025
## Usia -0.0322 0.0069 -4.6924 0.0000
## Status.Perkawinan1 0.5301 0.1804 2.9383 0.0033
## 1|2 -0.5217 0.3040 -1.7161 0.0862
## 2|3 2.6805 0.3220 8.3249 0.0000
## 3|4 5.1927 0.4173 12.4437 0.0000
# Predicted probabilities on the test data
predict_prob <- predict(model_pendapatan, data_test, type = "probs")
head(predict_prob)
##            1         2          3            4
## 1 0.33989193 0.5869007 0.06684284 0.0063644817
## 2 0.09094504 0.6200168 0.25712374 0.0319143966
## 3 0.34715626 0.5817922 0.06488758 0.0061639731
## 4 0.22654704 0.6515250 0.11079339 0.0111345959
## 5 0.45852474 0.4956463 0.04194932 0.0038796221
## 6 0.27736764 0.6268207 0.08729227 0.0085193724
# Confusion matrix
prediksi.test <- predict(model_pendapatan, data_test, type = "class")
confusionMatrix(prediksi.test, data_test$Pendapatan)
## Confusion Matrix and Statistics
##
## Reference
## Prediction 1 2 3 4
## 1 11 2 0 0
## 2 43 107 23 2
## 3 1 4 3 2
## 4 0 0 0 0
##
## Overall Statistics
##
## Accuracy : 0.6111
## 95% CI : (0.5394, 0.6794)
## No Information Rate : 0.5707
## P-Value [Acc > NIR] : 0.1406
##
## Kappa : 0.1738
##
## Mcnemar's Test P-Value : NA
##
## Statistics by Class:
##
## Class: 1 Class: 2 Class: 3 Class: 4
## Sensitivity 0.20000 0.9469 0.11538 0.0000
## Specificity 0.98601 0.2000 0.95930 1.0000
## Pos Pred Value 0.84615 0.6114 0.30000 NaN
## Neg Pred Value 0.76216 0.7391 0.87766 0.9798
## Prevalence 0.27778 0.5707 0.13131 0.0202
## Detection Rate 0.05556 0.5404 0.01515 0.0000
## Detection Prevalence 0.06566 0.8838 0.05051 0.0000
## Balanced Accuracy 0.59301 0.5735 0.53734 0.5000
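# Note: the fitted model never predicts class 4 and rarely class 3, as expected
# given the strong class imbalance; a quick look at the predicted class counts (sketch):
# table(prediksi.test)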
# Odds ratios
data.frame(coef(model_pendapatan), exp(coef(model_pendapatan)))
## coef.model_pendapatan. exp.coef.model_pendapatan..
## Pendidikan1 2.52137511 12.4456991
## Klasifikasi.Daerah1 0.56413644 1.7579291
## Jenis.Kelamin1 0.54821922 1.7301692
## Status.KRT1 0.64226197 1.9007755
## Usia -0.03221301 0.9683003
## Status.Perkawinan1 0.53005786 1.6990306
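# Confidence intervals for the odds ratios can be added via profile likelihood
# (a sketch; confint() on a polr fit may take a moment to run):
# exp(cbind(OR = coef(model_pendapatan), confint(model_pendapatan)))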