Group 3:
1. Safira Khoirulanisa Salsabila
2. Regita Aftina Rizqi
3. Ayunda Mulya Putri
4. Alfian Adi Pratama
5. Jeni Anggraeni
6. Muhammad Maulana Zafrani
library(ggplot2)
data <- read.csv("heart.csv")
head(data)
## age sex cp trestbps chol fbs restecg thalach exang oldpeak slope ca thal
## 1 63 1 3 145 233 1 0 150 0 2.3 0 0 1
## 2 37 1 2 130 250 0 1 187 0 3.5 0 0 2
## 3 41 0 1 130 204 0 0 172 0 1.4 2 0 2
## 4 56 1 1 120 236 0 1 178 0 0.8 2 0 2
## 5 57 0 0 120 354 0 1 163 1 0.6 2 0 2
## 6 57 1 0 140 192 0 1 148 0 0.4 1 0 1
## target
## 1 1
## 2 1
## 3 1
## 4 1
## 5 1
## 6 1
ggplot(data, aes(x = age)) +
  geom_histogram(binwidth = 5, fill = "blue", color = "white") +
  labs(title = "Age Distribution", x = "Age", y = "Frequency")
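The histogram can be paired with a quick numeric summary of the same variable; a minimal sketch, not part of the original run:

summary(data$age) # minimum, quartiles, median, mean, and maximum of age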
library(ggplot2)
library(gridExtra)
library(dplyr)
##
## Attaching package: 'dplyr'
## The following object is masked from 'package:gridExtra':
##
## combine
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
data_cleaned <- read.csv("heart.csv")
features_to_plot <- c("age", "trestbps", "chol", "thalach", "oldpeak")
plot_list <- list()
for (feature in features_to_plot[1:4]) { # exclude 'oldpeak' for a 2x2 grid
  p <- ggplot(data_cleaned, aes(x = .data[[feature]], fill = factor(target))) +
    geom_histogram(bins = 15, color = "black", alpha = 0.7, position = "identity") +
    scale_fill_manual(values = c("#E41A1C", "#377EB8"), name = "Heart Attack Risk") +
    labs(title = paste("Distribution of", feature, "by Heart Attack Risk"),
         x = feature, y = "Count") +
    theme_minimal()
  plot_list[[feature]] <- p
}
grid.arrange(grobs = plot_list, ncol = 2)
detect_missing <- sapply(data_cleaned, function(x) sum(is.na(x)))
detect_missing
library(ggplot2)
library(gridExtra)
data_cleaned <- read.csv("heart.csv")
features <- c("age", "trestbps", "chol", "thalach", "oldpeak")
plot_list <- list()
for (feature in features) {
  p <- ggplot(data_cleaned, aes(x = factor(1), y = .data[[feature]])) +
    geom_boxplot(fill = "lightblue", color = "black") +
    labs(title = paste("Boxplot of", feature), y = feature, x = "") +
    theme_minimal() +
    theme(axis.text.x = element_blank(), axis.ticks.x = element_blank())
  plot_list[[feature]] <- p
}
do.call(grid.arrange, c(plot_list, ncol = 3))
library(dplyr)
detect_outlier <- function(df, feature) {
  # Flag rows that fall outside the 1.5 * IQR fences for a single feature
  q1 <- quantile(df[[feature]], 0.25)
  q3 <- quantile(df[[feature]], 0.75)
  iqr <- q3 - q1
  lower_bound <- q1 - 1.5 * iqr
  upper_bound <- q3 + 1.5 * iqr
  outliers <- which(df[[feature]] < lower_bound | df[[feature]] > upper_bound)
  return(outliers)
}
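Applied to a single column, the helper returns the row indices flagged by the 1.5 * IQR rule; an illustrative call (the feature name here is chosen as an example, not taken from the original run):

chol_outliers <- detect_outlier(data_cleaned, "chol") # row indices with chol outside the fences
length(chol_outliers) # how many rows were flagged for chol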
features_2 <- c("age", "trestbps", "chol", "thalach", "oldpeak")
total_outlier <- 0
outlier_indices <- c()
for (feature in features_2) {
  outliers <- detect_outlier(data_cleaned, feature)
  total_outlier <- total_outlier + length(outliers)
  outlier_indices <- c(outlier_indices, outliers)
}
outlier_indices <- unique(outlier_indices)
percentage <- length(outlier_indices) / nrow(data_cleaned) * 100
cat(sprintf("Outlier percentage: %.2f%%\n", percentage))
## Outlier percentage: 6.27%
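A per-feature breakdown shows which variables contribute most of the flagged rows; a minimal sketch using the same helper (counts not reproduced here):

outlier_counts <- sapply(features_2, function(f) length(detect_outlier(data_cleaned, f)))
outlier_counts # named vector: number of outlier rows per feature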
data_cleaned_no_outliers <- data_cleaned[-outlier_indices, ]
cat(sprintf("Shape of the dataset after removing outliers: %d rows, %d columns\n",
nrow(data_cleaned_no_outliers), ncol(data_cleaned_no_outliers)))
## Shape of the dataset after removing outliers: 284 rows, 14 columns
library(caret)
## Loading required package: lattice
features_to_scale <- c("age", "trestbps", "chol", "thalach", "oldpeak")
preproc <- preProcess(data_cleaned[, features_to_scale], method = c("center", "scale"))
data_cleaned_scaled <- data_cleaned
data_cleaned_scaled[, features_to_scale] <- predict(preproc, data_cleaned[, features_to_scale])
head(data_cleaned_scaled)
## age sex cp trestbps chol fbs restecg thalach exang
## 1 0.9506240 1 3 0.76269408 -0.25591036 1 0 0.01541728 0
## 2 -1.9121497 1 2 -0.09258463 0.07208025 0 1 1.63077374 0
## 3 -1.4717230 0 1 -0.09258463 -0.81542377 0 0 0.97589950 0
## 4 0.1798773 1 1 -0.66277043 -0.19802967 0 1 1.23784920 0
## 5 0.2899839 0 0 -0.66277043 2.07861109 0 1 0.58297496 1
## 6 0.2899839 1 0 0.47760118 -1.04694656 0 1 -0.07189928 0
## oldpeak slope ca thal target
## 1 1.0855423 0 0 1 1
## 2 2.1190672 0 0 2 1
## 3 0.3103986 2 0 2 1
## 4 -0.2063639 2 0 2 1
## 5 -0.3786180 2 0 2 1
## 6 -0.5508722 1 0 1 1
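As a sanity check on the preprocessing, the centered and scaled columns should have means near 0 and standard deviations near 1; a minimal sketch:

round(colMeans(data_cleaned_scaled[, features_to_scale]), 3) # means should be approximately 0
round(apply(data_cleaned_scaled[, features_to_scale], 2, sd), 3) # standard deviations should be approximately 1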
library(ggplot2)
library(gridExtra)
features_to_plot <- c("age", "trestbps", "chol", "thalach", "oldpeak")
plot_list <- list()
for (feature in features_to_plot[-length(features_to_plot)]) {
  p <- ggplot(data_cleaned, aes(x = .data[[feature]], fill = factor(target))) +
    geom_histogram(bins = 15, position = "identity", alpha = 0.6, color = "black") +
    labs(title = paste("Distribution of", feature, "by Heart Attack Risk"),
         x = feature,
         y = "Count") +
    scale_fill_manual(values = c("lightblue", "salmon")) +
    theme_minimal()
  plot_list[[feature]] <- p
}
grid.arrange(grobs = plot_list, ncol = 2)
library(caret)
set.seed(123)
train_index <- createDataPartition(data_cleaned$target, p = 0.7, list = FALSE)
train_data <- data_cleaned[train_index, ]
test_data <- data_cleaned[-train_index, ]
all_columns <- paste(names(data_cleaned)[1:13], collapse = "+")
formula <- as.formula(paste("target ~", all_columns))
print(paste("Formula:", formula))
## [1] "Formula: ~"
## [2] "Formula: target"
## [3] "Formula: age + sex + cp + trestbps + chol + fbs + restecg + thalach + exang + oldpeak + slope + ca + thal"
log_reg <- glm(formula, data = train_data, family = binomial)
summary(log_reg)
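Because logistic-regression coefficients are on the log-odds scale, exponentiating them gives odds ratios, which are often easier to read; a minimal sketch (values not reproduced here):

exp(coef(log_reg)) # odds ratio for a one-unit increase in each predictor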
library(caret)
pred_prob <- predict(log_reg, newdata = test_data, type = "response")
pred_class <- ifelse(pred_prob > 0.5, 1, 0)
accuracy <- mean(pred_class == test_data$target)
print(paste("Accuracy:", accuracy))
## [1] "Accuracy: 0.844444444444444"
cm <- confusionMatrix(as.factor(pred_class), as.factor(test_data$target))
print(cm)
## Confusion Matrix and Statistics
##
## Reference
## Prediction 0 1
## 0 33 4
## 1 10 43
##
## Accuracy : 0.8444
## 95% CI : (0.7528, 0.9123)
## No Information Rate : 0.5222
## P-Value [Acc > NIR] : 1.307e-10
##
## Kappa : 0.6864
##
## Mcnemar's Test P-Value : 0.1814
##
## Sensitivity : 0.7674
## Specificity : 0.9149
## Pos Pred Value : 0.8919
## Neg Pred Value : 0.8113
## Prevalence : 0.4778
## Detection Rate : 0.3667
## Detection Prevalence : 0.4111
## Balanced Accuracy : 0.8412
##
## 'Positive' Class : 0
##
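Note that confusionMatrix() treats "0" as the positive class by default, as shown above, so the reported sensitivity refers to detecting the low-risk class. If class 1 (heart attack risk) should be treated as positive, the positive argument can be set explicitly; a minimal sketch:

cm_pos1 <- confusionMatrix(as.factor(pred_class), as.factor(test_data$target), positive = "1")
cm_pos1$byClass[c("Sensitivity", "Specificity", "Pos Pred Value")] # metrics with class 1 as the positive class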