STA1381: Nonlinear Regression Part 1
1. Introduction
Library
library(MultiKink) # DATA TRICEPS
library(tidyverse)
library(ggplot2)
library(dplyr)
library(purrr)
library(rsample)
library(splines)
2. Example from the Lecture
set.seed(132)
data.x <- rnorm(1000,1,1)
err <- rnorm(1000)
y <- 5+2*data.x+3*data.x^2+err ## second-order polynomial model
plot(data.x,y,xlim=c(-2,5), ylim=c(-10,70))
2.1 Linear Regression
lin.mod <-lm( y~data.x)
plot( data.x,y,xlim=c( -2,5), ylim=c( -10,70), xlab = "x", main = "Plot Data Bangkitan Fungsi Regresi Linier")
lines(data.x,lin.mod$fitted.values,col="green")
summary(lin.mod)
##
## Call:
## lm(formula = y ~ data.x)
##
## Residuals:
## Min 1Q Median 3Q Max
## -5.702 -2.473 -1.280 1.083 30.398
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 4.8479 0.1855 26.13 <2e-16 ***
## data.x 8.0726 0.1310 61.61 <2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 4.098 on 998 degrees of freedom
## Multiple R-squared: 0.7918, Adjusted R-squared: 0.7916
## F-statistic: 3796 on 1 and 998 DF, p-value: < 2.2e-16
2.3 Second-Order Polynomial Model
pol.mod <- lm( y~data.x+I(data.x^2)) # order 2
ix <- sort( data.x,index.return=T)$ix
plot(data.x,y,xlim=c(-2,5), ylim=c(-10,70), xlab = "x", main = "Plot Data Bangkitan Fungsi Polinomial")
lines(data.x[ix], pol.mod$fitted.values[ix],col="red", cex=4)
summary(pol.mod)
##
## Call:
## lm(formula = y ~ data.x + I(data.x^2))
##
## Residuals:
## Min 1Q Median 3Q Max
## -3.07346 -0.69900 -0.01143 0.66739 3.01532
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 5.01559 0.04622 108.53 <2e-16 ***
## data.x 2.00292 0.05920 33.83 <2e-16 ***
## I(data.x^2) 2.98386 0.02428 122.88 <2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 1.021 on 997 degrees of freedom
## Multiple R-squared: 0.9871, Adjusted R-squared: 0.9871
## F-statistic: 3.816e+04 on 2 and 997 DF, p-value: < 2.2e-16
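As a side note, the same order-2 fit can be written with poly(); with raw = TRUE the basis is simply x and x^2, so the estimates should match those of pol.mod above. A minimal sketch (pol.mod2 is an illustrative name):
pol.mod2 <- lm(y ~ poly(data.x, 2, raw = TRUE)) # hedged sketch: equivalent order-2 fit via poly()
coef(pol.mod2) # should match the estimates in pol.mod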
2.4 Step Function Model
range( data.x)
## [1] -1.541987 4.268654
c1 <- as.factor(ifelse(data.x<=0,1,0))
c2 <- as.factor(ifelse(data.x<=2 & data.x>0,1,0))
c3 <- as.factor(ifelse(data.x>2,1,0))
data.frame(y, c1, c2, c3)
##            y c1 c2 c3
## 1  14.570286  0  1  0
## 2   6.566656  0  1  0
## 3  10.329115  0  1  0
## 4  22.807881  0  0  1
## 5   3.934351  0  1  0
## 6   5.649999  0  1  0
## ... (output truncated; 1000 rows in total)
step.mod <- lm(y~c1+c2+c3)
plot(data.x,y,xlim=c(-2,5), ylim=c(-10,70))
lines(data.x,lin.mod$fitted.values,col="green", lwd =1)
lines(data.x[ix], step.mod$fitted.values[ix],col="orange", lwd =2)
summary(step.mod)
##
## Call:
## lm(formula = y ~ c1 + c2 + c3)
##
## Residuals:
## Min 1Q Median 3Q Max
## -10.500 -3.845 -0.599 2.556 40.110
##
## Coefficients: (1 not defined because of singularities)
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 29.5958 0.4199 70.48 <2e-16 ***
## c11 -24.3587 0.5864 -41.54 <2e-16 ***
## c21 -18.4193 0.4640 -39.70 <2e-16 ***
## c31 NA NA NA NA
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 5.177 on 997 degrees of freedom
## Multiple R-squared: 0.6681, Adjusted R-squared: 0.6675
## F-statistic: 1004 on 2 and 997 DF, p-value: < 2.2e-16
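A hedged alternative builds the same step function with cut(), whose bins mirror the manual dummies c1, c2, c3; because the intercept then absorbs the first bin, this also avoids the singularity reported in the output above. A minimal sketch (bins and step.mod2 are illustrative names):
bins <- cut(data.x, breaks = c(-Inf, 0, 2, Inf)) # bins (-Inf,0], (0,2], (2,Inf]
step.mod2 <- lm(y ~ bins) # hedged sketch: step function via cut()
summary(step.mod2)$coefficients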
2.5 Model Comparison
The models can be compared exploratively through the plots above. For a more precise comparison, we can look at the AIC (Akaike's Information Criterion) and the MSE (Mean Squared Error) of each model: the smaller the AIC or MSE, the better the model.
MSE = function(pred,actual){
mean((pred-actual)^2)
}
AIC <- rbind(AIC(lin.mod),
AIC(pol.mod),
AIC(step.mod))
MSE <- rbind(MSE(predict(lin.mod),y),
MSE(predict(pol.mod),y),
MSE(predict(step.mod),y))
Model <- c("Linear","Polynomial (ordo=2)","Tangga (breaks=3)")
model.bangkitan1 <- data.frame(Model,AIC,MSE)
knitr::kable(model.bangkitan1)
| Model | AIC | MSE |
|---|---|---|
| Linear | 5663.000 | 16.762150 |
| Polynomial (ordo=2) | 2883.470 | 1.038308 |
| Tangga (breaks=3) | 6131.262 | 26.719266 |
2.6 Piecewise Cubic Polynomial
dt.all <- cbind(y,data.x)
## 1 knot (at x = 1)
dt1 <- dt.all[data.x <=1,]
dim(dt1)
## [1] 491 2
dt2 <- dt.all[data.x >1,]
dim(dt2)
## [1] 509 2
plot(data.x,y,xlim=c(-2,5), ylim=c(-10,70), pch=16, col="orange")
cub.mod1 <- lm(dt1[,1]~dt1[,2]+I(dt1[,2]^2)+I(dt1[,2]^3))
ix <- sort(dt1[,2], index.return=T)$ix
lines(dt1[ix,2],cub.mod1$fitted.values[ix],col="blue", lwd=2)
cub.mod2 <- lm(dt2[,1]~dt2[,2]+I(dt2[,2]^2)+I(dt2[,2]^3))
ix <- sort(dt2[,2], index.return=T)$ix
lines(dt2[ix,2],cub.mod2$fitted.values[ix],col="blue", lwd=2)
2.7 Truncated Power Basis
The two cubic pieces above are fitted separately, so nothing forces them to join smoothly at the knot. The truncated power term h(x) = (x - 1)^3 for x > 1 (and 0 otherwise) adds the extra cubic behaviour only beyond the knot, so the fitted curve and its first two derivatives remain continuous at x = 1.
# using the truncated power basis
plot(data.x,y,xlim=c( -2,5), ylim=c( -10,70), pch=16,col="orange")
abline(v=1,col="red", lty=2)
hx <- ifelse( data.x>1,(data.x-1)^3,0)
cubspline.mod <- lm( y~data.x+I(data.x^2)+I(data.x^3)+hx)
ix <- sort( data.x,index.return=T)$ix
lines( data.x[ix], cubspline.mod$fitted.values[ix],col="green", lwd=2)
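Since the splines package is already loaded, the same one-knot cubic spline can also be fitted with bs(); both parameterizations span the same function space, so the fitted values should coincide with cubspline.mod up to numerical error. A minimal sketch (bs.mod is an illustrative name):
bs.mod <- lm(y ~ bs(data.x, knots = 1, degree = 3)) # hedged sketch: B-spline basis, 1 interior knot at x = 1
max(abs(fitted(bs.mod) - fitted(cubspline.mod))) # should be close to 0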
2.8 Comparison with K-Fold CV
plot( data.x,y,xlim=c(-2,5), ylim=c( -10,70), pch=16,col="orange")
abline(v=0,col="red", lty=2)
abline(v=2,col="red", lty=2)
hx1 <- ifelse( data.x>0,(data.x-0)^3,0)
hx2 <- ifelse( data.x>2,(data.x-2)^3,0)
cubspline.mod2 <- lm( y~data.x+I(data.x^2)+I(data.x^3)+hx1+hx2)
ix <- sort( data.x,index.return=T)$ix
lines( data.x[ix],cubspline.mod2$fitted.values[ix],col="green", lwd=2)
2.9 Comparing 1 Knot vs 2 Knots with 5-Fold CV
##1 knot
data.all <- cbind( y,data.x,hx)
set.seed(456)
ind <- sample(1:5,length( data.x),replace=T)
res <- c()
for( i in 1:5){
dt.train <- data.all[ ind!=i,]
x.test <- data.all[ ind==i, -1]
y.test <- data.all[ ind==i,1]
mod1 <- lm( dt.train[,1]~dt.train[,2]+I( dt.train[,2]^2)+I( dt.train[,2]^3)+dt.train[,3])
x.test.olah <- cbind(1,x.test[,1], x.test[,1]^2,x.test[,1]^3,x.test[,2])
beta <- coefficients(mod1)
prediksi <- x.test.olah%*%beta
res <- c( res,mean(( y.test-prediksi)^2))
}
res
## [1] 0.951317 1.016440 1.025993 1.077070 1.145546
mean(res)
## [1] 1.043273
## 2 knots (knots at 0 and 2)
data.all2 <- cbind(y,data.x,hx1,hx2)
set.seed(456)
ind2 <- sample(1:5,length( data.x),replace=T)
res2 <- c()
for( i in 1:5){
dt.train2 <- data.all2[ind2!=i,]
x.test2 <- data.all2[ind2==i,-1]
y.test2 <- data.all2[ind2==i,1]
mod2 <- lm(dt.train2[,1]~dt.train2[,2]+I(dt.train2[,2]^2)+I(dt.train2[,2]^3)+dt.train2[,3]+dt.train2[,4])
x.test.olah2 <- cbind(1,x.test2[,1],x.test2[,1]^2,x.test2[,1]^3,x.test2[,2],x.test2[,3])
beta2 <- coefficients(mod2)
prediksi2 <- x.test.olah2%*%beta2
res2 <- c(res2,mean((y.test2-prediksi2)^2))
}
res2
## [1] 0.9689951 1.0187757 1.0352576 1.0764533 1.1606197
mean(res2)
## [1] 1.05202
Based on 5-fold cross-validation, the 1-knot spline (mean CV-MSE about 1.043) does slightly better than the 2-knot spline (about 1.052).
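As a side note, the manual design-matrix multiplication in the loops above can be avoided by keeping the data in a data.frame and letting predict() rebuild the test-set terms from the stored formula. A minimal sketch for the 1-knot model, reusing the same fold assignment ind (df.spline and res.alt are illustrative names):
df.spline <- data.frame(y = y, x = data.x, hx = hx)
res.alt <- numeric(5)
for (i in 1:5) {
  train <- df.spline[ind != i, ]
  test <- df.spline[ind == i, ]
  fit <- lm(y ~ x + I(x^2) + I(x^3) + hx, data = train) # hedged sketch: same 1-knot spline
  res.alt[i] <- mean((test$y - predict(fit, newdata = test))^2)
}
mean(res.alt) # should match mean(res) above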
2.10 Smoothing Spline
ss1 <- smooth.spline(data.x,y,all.knots = T)
plot(data.x,y,xlim=c(-2,5), ylim=c(-10,70), pch=16,col="orange")
lines(ss1,col="purple", lwd=2)
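By default smooth.spline chooses the smoothing parameter by generalized cross-validation (GCV); the selected values can be inspected, and ordinary leave-one-out CV can be requested instead. A minimal sketch (ss2 is an illustrative name):
ss1$df # effective degrees of freedom chosen by GCV
ss1$lambda # corresponding smoothing parameter
ss2 <- smooth.spline(data.x, y, cv = TRUE) # hedged sketch: leave-one-out CV instead of GCV
lines(ss2, col = "blue", lwd = 2, lty = 2)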
3. TRICEPS Data Example
Data Description
The data used for this illustration come from an anthropometric study of 892 women under 50 years of age in three villages in the Gambia, West Africa. The data set has three columns: age, lntriceps, and triceps, described as follows:
age: respondent's age
lntriceps: logarithm of the triceps skinfold thickness
triceps: triceps skinfold thickness
The triceps skinfold is needed to calculate the upper-arm muscle circumference. Its thickness provides information about body fat reserves, while the calculated muscle mass provides information about protein reserves.
data("triceps", package="MultiKink")Jika kita gambarkan dalam bentuk scatterplot
ggplot(triceps,aes(x=age, y=triceps)) +
geom_point(alpha=0.55, color="black") +
theme_bw()
Based on the relationship visible in the scatterplot, we will try to find a model that represents this pattern well.
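Before modelling, a quick numerical check of the variables used below can also help; a minimal sketch:
dim(triceps)
summary(triceps[, c("age", "triceps")])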
Linear Regression
mod_linear = lm(triceps~age,data=triceps)
summary(mod_linear)
##
## Call:
## lm(formula = triceps ~ age, data = triceps)
##
## Residuals:
## Min 1Q Median 3Q Max
## -12.9512 -2.3965 -0.5154 1.5822 25.1233
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 6.19717 0.21244 29.17 <2e-16 ***
## age 0.21584 0.01014 21.28 <2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 4.007 on 890 degrees of freedom
## Multiple R-squared: 0.3372, Adjusted R-squared: 0.3365
## F-statistic: 452.8 on 1 and 890 DF, p-value: < 2.2e-16
ringkasan_linear <- summary(mod_linear)
ringkasan_linear$r.squared
## [1] 0.3372092
AIC(mod_linear)
## [1] 5011.515
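For comparability with the MSE used in Section 2.5, the in-sample mean squared error of this fit can also be computed directly; a minimal sketch:
mean((predict(mod_linear) - triceps$triceps)^2) # in-sample MSE of the linear model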
tabel_data <- data.frame("y_aktual"=triceps$triceps,
                         "y_pred"=predict(mod_linear),
                         "residuals"=residuals(mod_linear))
tabel_data
##     y_aktual    y_pred     residuals
## 1        3.4  8.798074 -5.398073843
## 2        4.0  8.336171 -4.336171174
## 3        4.2  8.364231 -4.164230898
## 4        4.2  8.677202 -4.477202306
## 5        4.4  8.381498 -3.981497986
## 6        4.4  8.759222 -4.359222149
## ... (output truncated; 892 rows in total)
ggplot(triceps,aes(x=age, y=triceps)) +
geom_point(alpha=0.55, color="black") +
stat_smooth(method = "lm",
formula = y~x,lty = 1,
col = "blue",se = F)+
theme_bw()
Degree-2 Polynomial Regression (order 2)
mod_polinomial2 = lm(triceps ~ poly(age,2,raw = T),
data=triceps)
summary(mod_polinomial2)##
## Call:
## lm(formula = triceps ~ poly(age, 2, raw = T), data = triceps)
##
## Residuals:
## Min 1Q Median 3Q Max
## -12.5677 -2.4401 -0.4587 1.6368 24.9961
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 6.0229191 0.3063806 19.658 < 2e-16 ***
## poly(age, 2, raw = T)1 0.2434733 0.0364403 6.681 4.17e-11 ***
## poly(age, 2, raw = T)2 -0.0006257 0.0007926 -0.789 0.43
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 4.008 on 889 degrees of freedom
## Multiple R-squared: 0.3377, Adjusted R-squared: 0.3362
## F-statistic: 226.6 on 2 and 889 DF, p-value: < 2.2e-16
AIC(mod_polinomial2)## [1] 5012.89
ggplot(triceps,aes(x=age, y=triceps)) +
geom_point(alpha=0.55, color="black") +
stat_smooth(method = "lm",
formula = y~poly(x,2,raw=T),
lty = 1, col = "blue",se = F)+
theme_bw()
ggplot(triceps,aes(x=age, y=triceps)) +
geom_point(alpha=0.55, color="black") +
stat_smooth(method = "lm",
formula = y~poly(x,2,raw=T),
lty = 1, col = "blue",se = T)+
theme_bw()
Degree-3 Polynomial Regression (order 3)
mod_polinomial3 = lm(triceps ~ poly(age,3,raw = T),data=triceps)
summary(mod_polinomial3)##
## Call:
## lm(formula = triceps ~ poly(age, 3, raw = T), data = triceps)
##
## Residuals:
## Min 1Q Median 3Q Max
## -11.5832 -1.9284 -0.5415 1.3283 24.4440
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 8.004e+00 3.831e-01 20.893 < 2e-16 ***
## poly(age, 3, raw = T)1 -3.157e-01 7.721e-02 -4.089 4.73e-05 ***
## poly(age, 3, raw = T)2 3.101e-02 3.964e-03 7.824 1.45e-14 ***
## poly(age, 3, raw = T)3 -4.566e-04 5.612e-05 -8.135 1.38e-15 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 3.868 on 888 degrees of freedom
## Multiple R-squared: 0.3836, Adjusted R-squared: 0.3815
## F-statistic: 184.2 on 3 and 888 DF, p-value: < 2.2e-16
ggplot(triceps,aes(x=age, y=triceps)) +
geom_point(alpha=0.55, color="black") +
stat_smooth(method = "lm",
formula = y~poly(x,3,raw=T),
lty = 1, col = "blue",se = T)+
theme_bw()
AIC <- cbind(AIC(mod_linear),AIC(mod_polinomial2),AIC(mod_polinomial3))
MSE = function(pred,actual){
mean((pred-actual)^2)
}
MSE(predict(mod_linear),triceps$triceps)## [1] 16.01758
MSE(predict(mod_polinomial2),triceps$triceps)## [1] 16.00636
MSE(predict(mod_polinomial3),triceps$triceps)## [1] 14.89621
Step-Function Regression (5 breaks)
mod_tangga5 = lm(triceps ~ cut(age,5),data=triceps)
summary(mod_tangga5)##
## Call:
## lm(formula = triceps ~ cut(age, 5), data = triceps)
##
## Residuals:
## Min 1Q Median 3Q Max
## -10.5474 -2.0318 -0.4465 1.3682 23.3759
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 7.2318 0.1994 36.260 < 2e-16 ***
## cut(age, 5)(10.6,20.9] 1.6294 0.3244 5.023 6.16e-07 ***
## cut(age, 5)(20.9,31.2] 5.9923 0.4222 14.192 < 2e-16 ***
## cut(age, 5)(31.2,41.5] 7.5156 0.4506 16.678 < 2e-16 ***
## cut(age, 5)(41.5,51.8] 7.4561 0.5543 13.452 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 3.939 on 887 degrees of freedom
## Multiple R-squared: 0.3617, Adjusted R-squared: 0.3588
## F-statistic: 125.7 on 4 and 887 DF, p-value: < 2.2e-16
ggplot(triceps,aes(x=age, y=triceps)) +
geom_point(alpha=0.55, color="black") +
stat_smooth(method = "lm",
formula = y~cut(x,5),
lty = 1, col = "blue",se = F)+
theme_bw()
Step-Function Regression (7 breaks)
mod_tangga7 = lm(triceps ~ cut(age,7),data=triceps)
summary(mod_tangga7)##
## Call:
## lm(formula = triceps ~ cut(age, 7), data = triceps)
##
## Residuals:
## Min 1Q Median 3Q Max
## -10.8063 -1.7592 -0.4366 1.2894 23.1461
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 7.5592 0.2219 34.060 < 2e-16 ***
## cut(age, 7)(7.62,15] -0.6486 0.3326 -1.950 0.0515 .
## cut(age, 7)(15,22.3] 3.4534 0.4239 8.146 1.27e-15 ***
## cut(age, 7)(22.3,29.7] 5.8947 0.4604 12.804 < 2e-16 ***
## cut(age, 7)(29.7,37] 7.8471 0.5249 14.949 < 2e-16 ***
## cut(age, 7)(37,44.4] 6.9191 0.5391 12.835 < 2e-16 ***
## cut(age, 7)(44.4,51.8] 6.3013 0.6560 9.606 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 3.805 on 885 degrees of freedom
## Multiple R-squared: 0.4055, Adjusted R-squared: 0.4014
## F-statistic: 100.6 on 6 and 885 DF, p-value: < 2.2e-16
ggplot(triceps,aes(x=age, y=triceps)) +
geom_point(alpha=0.55, color="black") +
stat_smooth(method = "lm",
formula = y~cut(x,7),
lty = 1, col = "blue",se = F)+
theme_bw()
Model Comparison
MSE = function(pred,actual){
mean((pred-actual)^2)
}
Comparing the models:
nilai_MSE <- rbind(MSE(predict(mod_linear),triceps$triceps),
MSE(predict(mod_polinomial2),triceps$triceps),
MSE(predict(mod_polinomial3),triceps$triceps),
MSE(predict(mod_tangga5),triceps$triceps),
MSE(predict(mod_tangga7),triceps$triceps))
nama_model <- c("Linear","Poly (ordo=2)", "Poly (ordo=3)",
"Tangga (breaks=5)", "Tangga (breaks=7)")
data.frame(nama_model,nilai_MSE)## nama_model nilai_MSE
## 1 Linear 16.01758
## 2 Poly (ordo=2) 16.00636
## 3 Poly (ordo=3) 14.89621
## 4 Tangga (breaks=5) 15.42602
## 5 Tangga (breaks=7) 14.36779
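Since the in-sample MSE of these nested polynomial fits can only improve as terms are added, it is also informative to look at the AIC values collected earlier. A minimal sketch (not part of the original output) that simply labels and prints the AIC object created above:
# label the 1 x 3 matrix of AIC values assembled earlier and print it
colnames(AIC) <- c("Linear","Poly (ordo=2)","Poly (ordo=3)")
AIC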
Spline Regression
# Determine the number of basis functions and knots
dim(bs(triceps$age, knots = c(10, 20,40)))## [1] 892 6
# or, equivalently
dim(bs(triceps$age, df=6))## [1] 892 6
# knot values chosen automatically by the function
attr(bs(triceps$age, df=6),"knots")## 25% 50% 75%
## 5.5475 12.2100 24.7275
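The column labels of the output above indicate that the automatically chosen interior knots are simply the 25th, 50th, and 75th percentiles of age. A quick check (a sketch, assuming the triceps data is loaded as throughout):
# the interior knots from bs(df = 6) should match the age quartiles
quantile(triceps$age, probs = c(0.25, 0.50, 0.75))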
B-spline
Using manually specified knots
knot_value_manual_3 = c(10, 20,40)
mod_spline_1 = lm(triceps ~ bs(age, knots =knot_value_manual_3 ),data=triceps)
summary(mod_spline_1)##
## Call:
## lm(formula = triceps ~ bs(age, knots = knot_value_manual_3),
## data = triceps)
##
## Residuals:
## Min 1Q Median 3Q Max
## -11.385 -1.682 -0.393 1.165 22.855
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 8.22533 0.61873 13.294 < 2e-16 ***
## bs(age, knots = knot_value_manual_3)1 -0.06551 1.14972 -0.057 0.955
## bs(age, knots = knot_value_manual_3)2 -4.30051 0.72301 -5.948 3.90e-09 ***
## bs(age, knots = knot_value_manual_3)3 7.80435 1.17793 6.625 6.00e-11 ***
## bs(age, knots = knot_value_manual_3)4 6.14890 1.27439 4.825 1.65e-06 ***
## bs(age, knots = knot_value_manual_3)5 5.56640 1.42225 3.914 9.78e-05 ***
## bs(age, knots = knot_value_manual_3)6 7.90178 1.54514 5.114 3.87e-07 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 3.76 on 885 degrees of freedom
## Multiple R-squared: 0.4195, Adjusted R-squared: 0.4155
## F-statistic: 106.6 on 6 and 885 DF, p-value: < 2.2e-16
ggplot(triceps,aes(x=age, y=triceps)) +
geom_point(alpha=0.55, color="black") +
stat_smooth(method = "lm",
formula = y~bs(x, knots = knot_value_manual_3),
lty = 1,se = F)
Using knots chosen automatically by the function
knot_value_pc_df_6 = attr(bs(triceps$age, df=6),"knots")
mod_spline_1 = lm(triceps ~ bs(age, knots =knot_value_pc_df_6 ),data=triceps)
summary(mod_spline_1)##
## Call:
## lm(formula = triceps ~ bs(age, knots = knot_value_pc_df_6), data = triceps)
##
## Residuals:
## Min 1Q Median 3Q Max
## -11.0056 -1.7556 -0.2944 1.2008 23.0695
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 6.5925 0.8770 7.517 1.37e-13 ***
## bs(age, knots = knot_value_pc_df_6)1 3.7961 1.5015 2.528 0.01164 *
## bs(age, knots = knot_value_pc_df_6)2 -2.0749 0.8884 -2.335 0.01974 *
## bs(age, knots = knot_value_pc_df_6)3 1.5139 1.1645 1.300 0.19391
## bs(age, knots = knot_value_pc_df_6)4 11.6394 1.3144 8.855 < 2e-16 ***
## bs(age, knots = knot_value_pc_df_6)5 5.9680 1.5602 3.825 0.00014 ***
## bs(age, knots = knot_value_pc_df_6)6 8.9127 1.4053 6.342 3.60e-10 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 3.757 on 885 degrees of freedom
## Multiple R-squared: 0.4206, Adjusted R-squared: 0.4167
## F-statistic: 107.1 on 6 and 885 DF, p-value: < 2.2e-16
ggplot(triceps,aes(x=age, y=triceps)) +
geom_point(alpha=0.55, color="black") +
stat_smooth(method = "lm",
formula = y~bs(x, knots = knot_value_pc_df_6),
lty = 1,se = F)
Natural Spline
knot_value_manual_3 = c(10, 20,40)
mod_spline3ns = lm(triceps ~ ns(age, knots = knot_value_manual_3),data=triceps)
summary(mod_spline3ns)##
## Call:
## lm(formula = triceps ~ ns(age, knots = knot_value_manual_3),
## data = triceps)
##
## Residuals:
## Min 1Q Median 3Q Max
## -10.7220 -1.7640 -0.3985 1.1908 22.9684
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 8.8748 0.3742 23.714 < 2e-16 ***
## ns(age, knots = knot_value_manual_3)1 7.0119 0.6728 10.422 < 2e-16 ***
## ns(age, knots = knot_value_manual_3)2 6.0762 0.8625 7.045 3.72e-12 ***
## ns(age, knots = knot_value_manual_3)3 2.0780 1.0632 1.954 0.051 .
## ns(age, knots = knot_value_manual_3)4 8.8616 0.9930 8.924 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 3.762 on 887 degrees of freedom
## Multiple R-squared: 0.4176, Adjusted R-squared: 0.415
## F-statistic: 159 on 4 and 887 DF, p-value: < 2.2e-16
ggplot(triceps,aes(x=age, y=triceps)) +
geom_point(alpha=0.55, color="black")+
stat_smooth(method = "lm",
formula = y~ns(x, knots = knot_value_manual_3),
lty = 1,se=F)
Smoothing Spline
model_sms <- with(data = triceps,smooth.spline(age,triceps))
model_sms ## Call:
## smooth.spline(x = age, y = triceps)
##
## Smoothing Parameter spar= 0.5162704 lambda= 1.232259e-06 (13 iterations)
## Equivalent Degrees of Freedom (Df): 50.00639
## Penalized Criterion (RSS): 8591.581
## GCV: 13.77835
The smooth.spline function automatically selects the best smoothing parameter (lambda) using either Cross-Validation (CV) or Generalized Cross-Validation (GCV). The main difference between the two is the goodness-of-fit criterion: CV uses the ordinary MSE, while GCV uses a weighted MSE. In both cases the number of folds equals the number of observations (leave-one-out).
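By default smooth.spline uses GCV (cv = FALSE); ordinary leave-one-out CV can be requested with cv = TRUE. A minimal sketch comparing the lambda selected by each criterion (with tied age values R will warn that leave-one-out CV is of doubtful value):
fit_gcv <- with(triceps, smooth.spline(age, triceps))            # GCV criterion (default)
fit_cv  <- with(triceps, smooth.spline(age, triceps, cv = TRUE)) # leave-one-out CV criterion
c(lambda_gcv = fit_gcv$lambda, lambda_cv = fit_cv$lambda)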
pred_data <- broom::augment(model_sms)
ggplot(pred_data,aes(x=x,y=y))+
geom_point(alpha=0.55, color="black")+
geom_line(aes(y=.fitted),col="blue",
lty=1)+
xlab("age")+
ylab("triceps")+
theme_bw()
The effect of the lambda parameter on the smoothness of the curve can be seen in the following illustration:
model_sms_lambda <- data.frame(lambda=seq(0,5,by=0.5)) %>%
group_by(lambda) %>%
do(broom::augment(with(data = triceps,smooth.spline(age,triceps,lambda = .$lambda))))
p <- ggplot(model_sms_lambda,
aes(x=x,y=y))+
geom_line(aes(y=.fitted),
col="blue",
lty=1
)+
facet_wrap(~lambda)
p
If we instead fix df = 7, the resulting smooth.spline curve represents the data better:
model_sms <- with(data = triceps,smooth.spline(age,triceps,df=7))
model_sms ## Call:
## smooth.spline(x = age, y = triceps, df = 7)
##
## Smoothing Parameter spar= 1.049874 lambda= 0.008827582 (11 iterations)
## Equivalent Degrees of Freedom (Df): 7.000747
## Penalized Criterion (RSS): 10112.16
## GCV: 14.20355
pred_data <- broom::augment(model_sms)
ggplot(pred_data,aes(x=x,y=y))+
geom_point(alpha=0.55, color="black")+
geom_line(aes(y=.fitted),col="blue",
lty=1)+
xlab("age")+
ylab("triceps")+
theme_bw()
LOESS
model_loess <- loess(triceps~age,
data = triceps)
summary(model_loess)## Call:
## loess(formula = triceps ~ age, data = triceps)
##
## Number of Observations: 892
## Equivalent Number of Parameters: 4.6
## Residual Standard Error: 3.777
## Trace of smoother matrix: 5.01 (exact)
##
## Control settings:
## span : 0.75
## degree : 2
## family : gaussian
## surface : interpolate cell = 0.2
## normalize: TRUE
## parametric: FALSE
## drop.square: FALSE
The effect of the span parameter on the smoothness of the curve can be seen in the following illustration:
model_loess_span <- data.frame(span=seq(0.1,5,by=0.5)) %>%
group_by(span) %>%
do(broom::augment(loess(triceps~age,
data = triceps,span=.$span)))
p2 <- ggplot(model_loess_span,
aes(x=age,y=triceps))+
geom_line(aes(y=.fitted),
col="blue",
lty=1
)+
facet_wrap(~span)
p2
This approach can also be carried out with the stat_smooth() function from the ggplot2 package.
library(ggplot2)
ggplot(triceps, aes(age,triceps)) +
geom_point(alpha=0.5,color="black") +
stat_smooth(method='loess',
formula=y~x,
span = 0.75,
col="blue",
lty=1,
se=F)
The span parameter can be tuned using cross-validation:
set.seed(132)
cross_val <- vfold_cv(triceps,v=10,strata = "triceps")
span <- seq(0.1,1,length.out=50)
best_loess <- map_dfr(span, function(i){
metric_loess <- map_dfr(cross_val$splits,
function(x){
mod <- loess(triceps ~ age,span = i,
data=triceps[x$in_id,])
pred <- predict(mod,
newdata=triceps[-x$in_id,])
truth <- triceps[-x$in_id,]$triceps
data_eval <- na.omit(data.frame(pred=pred,
truth=truth))
rmse <- mlr3measures::rmse(truth = data_eval$truth,
response = data_eval$pred
)
mae <- mlr3measures::mae(truth = data_eval$truth,
response = data_eval$pred
)
metric <- c(rmse,mae)
names(metric) <- c("rmse","mae")
return(metric)
}
)
head(metric_loess,20)
# compute the average over the 10 folds
mean_metric_loess <- colMeans(metric_loess)
mean_metric_loess
}
)
best_loess <- cbind(span=span,best_loess)
# display the results for all span values
best_loess## span rmse mae
## 1 0.1000000 3.789016 2.506671
## 2 0.1183673 3.792428 2.509406
## 3 0.1367347 3.785078 2.502333
## 4 0.1551020 3.784977 2.498899
## 5 0.1734694 3.780602 2.494697
## 6 0.1918367 3.775911 2.490984
## 7 0.2102041 3.769278 2.485604
## 8 0.2285714 3.762992 2.479560
## 9 0.2469388 3.757353 2.475655
## 10 0.2653061 3.753034 2.471438
## 11 0.2836735 3.749417 2.468157
## 12 0.3020408 3.745991 2.464595
## 13 0.3204082 3.743019 2.461535
## 14 0.3387755 3.741484 2.459747
## 15 0.3571429 3.740472 2.458881
## 16 0.3755102 3.739457 2.457771
## 17 0.3938776 3.738795 2.457462
## 18 0.4122449 3.738279 2.457694
## 19 0.4306122 3.738199 2.458498
## 20 0.4489796 3.738451 2.459635
## 21 0.4673469 3.738326 2.460398
## 22 0.4857143 3.738761 2.461827
## 23 0.5040816 3.739322 2.463458
## 24 0.5224490 3.740095 2.465381
## 25 0.5408163 3.741026 2.468152
## 26 0.5591837 3.741823 2.470680
## 27 0.5775510 3.742655 2.473551
## 28 0.5959184 3.743625 2.475928
## 29 0.6142857 3.745355 2.479760
## 30 0.6326531 3.747865 2.479508
## 31 0.6510204 3.749228 2.482319
## 32 0.6693878 3.751042 2.486252
## 33 0.6877551 3.753124 2.491824
## 34 0.7061224 3.755236 2.495463
## 35 0.7244898 3.757360 2.498713
## 36 0.7428571 3.759033 2.501964
## 37 0.7612245 3.760655 2.508596
## 38 0.7795918 3.761904 2.515296
## 39 0.7979592 3.762958 2.518621
## 40 0.8163265 3.765701 2.526320
## 41 0.8346939 3.774101 2.546981
## 42 0.8530612 3.782229 2.561268
## 43 0.8714286 3.788477 2.571948
## 44 0.8897959 3.793832 2.582574
## 45 0.9081633 3.802290 2.600068
## 46 0.9265306 3.809718 2.616593
## 47 0.9448980 3.815215 2.628792
## 48 0.9632653 3.829286 2.660092
## 49 0.9816327 3.840751 2.683652
## 50 1.0000000 3.868255 2.724199
# based on rmse
best_loess %>% slice_min(rmse)## span rmse mae
## 1 0.4306122 3.738199 2.458498
# based on mae
best_loess %>% slice_min(mae)## span rmse mae
## 1 0.3938776 3.738795 2.457462
library(ggplot2)
ggplot(triceps, aes(age,triceps)) +
geom_point(alpha=0.5,color="black") +
stat_smooth(method='loess',
formula=y~x,
span = 0.4306122,
col="blue",
lty=1,
se=F)
4. Model Evaluation Using Cross-Validation
Linear Regression
set.seed(132)
cross_val <- vfold_cv(triceps,v=10,strata = "triceps")
metric_linear <- map_dfr(cross_val$splits,
function(x){
mod <- lm(triceps ~ age,data=triceps[x$in_id,])
pred <- predict(mod,newdata=triceps[-x$in_id,])
truth <- triceps[-x$in_id,]$triceps
rmse <- mlr3measures::rmse(truth = truth,response = pred)
mae <- mlr3measures::mae(truth = truth,response = pred)
metric <- c(rmse,mae)
names(metric) <- c("rmse","mae")
return(metric)
}
)
metric_linear## # A tibble: 10 x 2
## rmse mae
## <dbl> <dbl>
## 1 3.47 2.63
## 2 5.45 3.49
## 3 3.73 2.67
## 4 3.64 2.82
## 5 3.82 2.66
## 6 4.01 2.72
## 7 4.02 2.83
## 8 4.27 3.09
## 9 3.41 2.61
## 10 3.92 2.85
# compute the average over the 10 folds
mean_metric_linear <- colMeans(metric_linear)
mean_metric_linear## rmse mae
## 3.974353 2.837196
Degree-2 Polynomial (order 2)
set.seed(132)
cross_val <- vfold_cv(triceps,v=10,strata = "triceps")
metric_poly2 <- map_dfr(cross_val$splits,
function(x){
mod <- lm(triceps ~ poly(age,2,raw = T),data=triceps[x$in_id,])
pred <- predict(mod,newdata=triceps[-x$in_id,])
truth <- triceps[-x$in_id,]$triceps
rmse <- mlr3measures::rmse(truth = truth,response = pred)
mae <- mlr3measures::mae(truth = truth,response = pred)
metric <- c(rmse,mae)
names(metric) <- c("rmse","mae")
return(metric)
}
)
metric_poly2## # A tibble: 10 x 2
## rmse mae
## <dbl> <dbl>
## 1 3.48 2.67
## 2 5.46 3.49
## 3 3.73 2.67
## 4 3.65 2.85
## 5 3.85 2.70
## 6 4.00 2.72
## 7 4.04 2.86
## 8 4.28 3.13
## 9 3.44 2.64
## 10 3.91 2.85
# compute the average over the 10 folds
mean_metric_poly2 <- colMeans(metric_poly2)
mean_metric_poly2## rmse mae
## 3.985185 2.859080
Degree-3 Polynomial (order 3)
set.seed(132)
cross_val <- vfold_cv(triceps,v=10,strata = "triceps")
metric_poly3 <- map_dfr(cross_val$splits,
function(x){
mod <- lm(triceps ~ poly(age,3,raw = T),data=triceps[x$in_id,])
pred <- predict(mod,newdata=triceps[-x$in_id,])
truth <- triceps[-x$in_id,]$triceps
rmse <- mlr3measures::rmse(truth = truth,response = pred)
mae <- mlr3measures::mae(truth = truth,response = pred)
metric <- c(rmse,mae)
names(metric) <- c("rmse","mae")
return(metric)
}
)
metric_poly3## # A tibble: 10 x 2
## rmse mae
## <dbl> <dbl>
## 1 3.30 2.39
## 2 5.22 3.22
## 3 3.39 2.42
## 4 3.61 2.55
## 5 3.61 2.44
## 6 3.94 2.66
## 7 3.84 2.66
## 8 4.32 3.05
## 9 3.68 2.42
## 10 3.64 2.57
# compute the average over the 10 folds
mean_metric_poly3 <- colMeans(metric_poly3)
mean_metric_poly3## rmse mae
## 3.855737 2.638939
Step Function
set.seed(132)
cross_val <- vfold_cv(triceps,v=10,strata = "triceps")
breaks <- 3:10
best_tangga <- map_dfr(breaks, function(i){
metric_tangga <- map_dfr(cross_val$splits,
function(x){
training <- triceps[x$in_id,]
training$age <- cut(training$age,i)
mod <- lm(triceps ~ age,data=training)
labs_x <- levels(mod$model[,2])
labs_x_breaks <- cbind(lower = as.numeric( sub("\\((.+),.*", "\\1", labs_x) ),
upper = as.numeric( sub("[^,]*,([^]]*)\\]", "\\1", labs_x) ))
testing <- triceps[-x$in_id,]
age_new <- cut(testing$age,c(labs_x_breaks[1,1],labs_x_breaks[,2]))
pred <- predict(mod,newdata=list(age=age_new))
truth <- testing$triceps
data_eval <- na.omit(data.frame(truth,pred))
rmse <- mlr3measures::rmse(truth = data_eval$truth,response = data_eval$pred)
mae <- mlr3measures::mae(truth = data_eval$truth,response = data_eval$pred)
metric <- c(rmse,mae)
names(metric) <- c("rmse","mae")
return(metric)
}
)
metric_tangga
# compute the average over the 10 folds
mean_metric_tangga <- colMeans(metric_tangga)
mean_metric_tangga
}
)
best_tangga <- cbind(breaks=breaks,best_tangga)
# display the results for all breaks
best_tangga## breaks rmse mae
## 1 3 3.824557 2.618013
## 2 4 3.901877 2.662765
## 3 5 3.925712 2.726059
## 4 6 3.832698 2.624551
## 5 7 3.815164 2.572888
## 6 8 3.818020 2.554495
## 7 9 3.786140 2.524581
## 8 10 3.819849 2.542902
# based on rmse
best_tangga %>% slice_min(rmse)## breaks rmse mae
## 1 9 3.78614 2.524581
# based on mae
best_tangga %>% slice_min(mae)## breaks rmse mae
## 1 9 3.78614 2.524581
Model Comparison
nilai_metric <- rbind(mean_metric_linear,
mean_metric_poly2,
mean_metric_poly3,
best_tangga %>% select(-1) %>% slice_min(mae))
nama_model <- c("Linear","Poly2","Poly3","Tangga (breaks=9)")
data.frame(nama_model,nilai_metric)## nama_model rmse mae
## 1 Linear 3.974353 2.837196
## 2 Poly2 3.985185 2.859080
## 3 Poly3 3.855737 2.638939
## 4 Tangga (breaks=9) 3.786140 2.524581
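For reference, the tuned LOESS from the span search in Section 3 can be appended to the same table. A sketch, assuming the best_loess object is still in the workspace; its rmse and mae are averages over the same 10 folds (although LOESS predictions outside the training range are dropped as NA, so the comparison is only approximate):
# append the CV metrics of the LOESS fit with the best span (by RMSE)
nilai_metric2 <- rbind(nilai_metric,
                       best_loess %>% slice_min(rmse) %>% select(-span))
data.frame(nama_model = c(nama_model, "LOESS (best span)"), nilai_metric2)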