7.2
library(mlbench)
set.seed(34)
trainingData <- mlbench.friedman1(200, sd = 1)
trainingData$x## [,1] [,2] [,3] [,4] [,5]
## [1,] 0.444768541 0.0491703867 0.368916968 0.09420248 0.9574439991
## [2,] 0.998540404 0.7377619252 0.342259319 0.90469829 0.0902387861
## [3,] 0.884893993 0.3621865171 0.204417247 0.15297192 0.6138568367
## [4,] 0.238425959 0.9007802284 0.138549710 0.38504523 0.8973691536
## [5,] 0.227313807 0.6280596920 0.881220208 0.51938871 0.4614294693
## [6,] 0.847769418 0.0789131550 0.153981640 0.72946379 0.5377695030
## [7,] 0.282561733 0.8352296567 0.030182170 0.61709185 0.5845064647
## [8,] 0.717608585 0.0007378496 0.871266484 0.65195225 0.0030892033
## [9,] 0.396051199 0.2195744149 0.815551894 0.40538081 0.5927632796
## [10,] 0.574659499 0.8168590870 0.791370922 0.43878356 0.4325678525
## [11,] 0.324377608 0.9417869952 0.395270071 0.87039433 0.4458680137
## [12,] 0.439419711 0.2315778644 0.440494188 0.34865757 0.6852223438
## [13,] 0.748768697 0.0644139852 0.700183252 0.99500739 0.5209169432
## [14,] 0.046812724 0.1256532029 0.513418385 0.38181832 0.4461647172
## [15,] 0.197936578 0.4618988184 0.448223552 0.50821137 0.2313842780
## [16,] 0.811761832 0.9715669921 0.932230789 0.85840135 0.6838182241
## [17,] 0.856969941 0.9831968301 0.911715439 0.76371968 0.4088150410
## [18,] 0.015575713 0.9240180948 0.386420507 0.11657560 0.5894136932
## [19,] 0.497023704 0.9498920620 0.512469367 0.61062973 0.9484871416
## [20,] 0.493202718 0.9081235141 0.317971562 0.39507446 0.7964035347
## [21,] 0.343518213 0.6365828402 0.606231719 0.04710814 0.3049227316
## [22,] 0.885414216 0.8040895669 0.896320121 0.32830102 0.4683117913
## [23,] 0.763962795 0.6458910676 0.601845442 0.10770140 0.2152711444
## [24,] 0.223158354 0.1260052160 0.521635279 0.77432076 0.0110986503
## [25,] 0.428553257 0.0126592580 0.229430367 0.98228303 0.7261406907
## [26,] 0.962311991 0.9641884365 0.730220835 0.20339069 0.9138799908
## [27,] 0.852263517 0.6305754054 0.269333133 0.38438750 0.6587024487
## [28,] 0.725295438 0.7225274001 0.223627059 0.48684929 0.7913433237
## [29,] 0.655883775 0.4519846092 0.228891338 0.70200926 0.1294506867
## [30,] 0.538854275 0.5118083574 0.799164373 0.22703794 0.2028152633
## [31,] 0.912512446 0.7497878931 0.884252719 0.22787423 0.4062946066
## [32,] 0.854506270 0.9948555343 0.341544044 0.50874327 0.2879803006
## [33,] 0.507669847 0.8450220823 0.012610905 0.70538338 0.4425005966
## [34,] 0.417403751 0.3715221158 0.741862779 0.06189380 0.5203055860
## [35,] 0.319385502 0.5183500256 0.209970027 0.19381778 0.9803088338
## [36,] 0.050905606 0.4532380477 0.998373389 0.32080408 0.1997841180
## [37,] 0.032689196 0.3586525710 0.146905211 0.73042228 0.6740772719
## [38,] 0.543507183 0.9193158848 0.821558192 0.88503577 0.3337999356
## [39,] 0.389838133 0.3780508987 0.100127387 0.48458875 0.2343060367
## [40,] 0.597955467 0.9223278852 0.799427686 0.26862640 0.8049633363
## [41,] 0.062913177 0.4104067110 0.743223988 0.48279231 0.2537052869
## [42,] 0.051205630 0.3404404137 0.679158788 0.93524023 0.4218629759
## [43,] 0.994554141 0.7824692673 0.970845583 0.10933371 0.3353833051
## [44,] 0.665415152 0.8889030213 0.053967729 0.41526196 0.3671296954
## [45,] 0.139641646 0.9570748035 0.891521180 0.21892298 0.8216980875
## [46,] 0.676683487 0.6963766955 0.945666182 0.41500226 0.2678030753
## [47,] 0.077136747 0.8721398283 0.050521298 0.86125750 0.5854289632
## [48,] 0.971863421 0.0488422054 0.620843122 0.38353413 0.7737276729
## [49,] 0.663357329 0.1133197579 0.325368669 0.45994958 0.2000149002
## [50,] 0.901817372 0.6957448327 0.917921318 0.07167142 0.2474781023
## [51,] 0.782706988 0.3046227591 0.832583969 0.49118711 0.5576036857
## [52,] 0.315006143 0.0264379731 0.342809462 0.08257746 0.1281603670
## [53,] 0.184207101 0.4358237558 0.823976185 0.41370897 0.8460202492
## [54,] 0.544767941 0.7832784902 0.693698988 0.14805924 0.2115339858
## [55,] 0.307176637 0.5582088544 0.350257230 0.45919853 0.6840995164
## [56,] 0.392564248 0.4424935116 0.858782122 0.28315763 0.8281292974
## [57,] 0.405407821 0.0086052949 0.084069085 0.22397520 0.7180219283
## [58,] 0.297241808 0.3516091432 0.681246782 0.93091833 0.6196570857
## [59,] 0.878596609 0.3391918447 0.004850209 0.27491438 0.8340926594
## [60,] 0.134682704 0.7983221468 0.571066241 0.04093563 0.1759040013
## [61,] 0.212395017 0.1316446301 0.593240862 0.82753743 0.8089628755
## [62,] 0.671838924 0.8706680990 0.149602911 0.27793165 0.2466843172
## [63,] 0.887044479 0.9989109151 0.740051109 0.79067137 0.7774439163
## [64,] 0.967587590 0.7951548318 0.742139631 0.93636348 0.1139824402
## [65,] 0.110956672 0.3390305708 0.887193116 0.32945714 0.2341976862
## [66,] 0.652666080 0.1806266503 0.470756157 0.92242981 0.1285020960
## [67,] 0.498635186 0.4911098506 0.332094103 0.07590527 0.3382013026
## [68,] 0.864985135 0.0038336336 0.198297722 0.54925251 0.6465511213
## [69,] 0.766760023 0.7426390881 0.833291957 0.46037071 0.9621582825
## [70,] 0.052414103 0.9524270166 0.855815382 0.34961778 0.6145240299
## [71,] 0.308068041 0.7961057469 0.612479734 0.27839692 0.8219093538
## [72,] 0.320141932 0.8112413019 0.025949088 0.68061617 0.3727320936
## [73,] 0.571202859 0.9321366083 0.082974488 0.96122229 0.5668621503
## [74,] 0.900813500 0.2702452699 0.261115218 0.15666676 0.4113571390
## [75,] 0.523988310 0.3259943670 0.830752086 0.25165417 0.4929841061
## [76,] 0.228802189 0.7386499224 0.032324913 0.25156912 0.3693464668
## [77,] 0.547581981 0.3158599921 0.251300620 0.37244229 0.1813268836
## [78,] 0.670443604 0.4074007007 0.754071520 0.57308828 0.3209288660
## [79,] 0.490485659 0.8959853132 0.910069087 0.04176620 0.0002780033
## [80,] 0.425974223 0.6567709954 0.238825056 0.71085959 0.9736491914
## [81,] 0.244999713 0.1872989838 0.330197217 0.94824584 0.2898637550
## [82,] 0.975494381 0.2871816973 0.020322232 0.09457274 0.2145658007
## [83,] 0.276083698 0.4675739042 0.263259275 0.90930949 0.6434848479
## [84,] 0.193209777 0.2697519700 0.661962211 0.21348130 0.2150562122
## [85,] 0.009662087 0.6180296263 0.351023349 0.13273282 0.9390394876
## [86,] 0.033046125 0.3926116971 0.697186010 0.18704815 0.5308191765
## [87,] 0.036393601 0.7335632718 0.715696034 0.54884754 0.3867252860
## [88,] 0.761883414 0.2870064350 0.295105996 0.61629910 0.4575986047
## [89,] 0.450880646 0.5854928258 0.157046239 0.49925120 0.1581427869
## [90,] 0.478382837 0.4932246460 0.451003641 0.73200803 0.5146277505
## [91,] 0.809401206 0.0843031078 0.887260949 0.10324899 0.0133572216
## [92,] 0.835698500 0.6542822297 0.058708795 0.05115633 0.4846461245
## [93,] 0.561387506 0.6125941994 0.257787813 0.58811538 0.7474705824
## [94,] 0.032109353 0.8580892808 0.786645186 0.82876159 0.4671700210
## [95,] 0.970210386 0.1111186729 0.565882814 0.63914828 0.5319261521
## [96,] 0.825061014 0.9052367806 0.406739496 0.32035855 0.8577351610
## [97,] 0.213852433 0.8766797285 0.021651195 0.79162727 0.4177978658
## [98,] 0.814000367 0.4356837904 0.821300018 0.75650515 0.2926091992
## [99,] 0.776467626 0.9364987162 0.630816365 0.35463563 0.2186427990
## [100,] 0.816974296 0.4714190126 0.877292458 0.44610680 0.6842931376
## [101,] 0.854419573 0.4696112052 0.314908787 0.42570522 0.4045962824
## [102,] 0.615831804 0.6425703585 0.214399404 0.39860715 0.2160483550
## [103,] 0.133673755 0.1202452520 0.711679708 0.83234970 0.2950728193
## [104,] 0.481118802 0.1079374114 0.505319767 0.40617261 0.5247520932
## [105,] 0.224811602 0.5589396034 0.495181278 0.34587620 0.8556023829
## [106,] 0.919606563 0.6451381017 0.287114171 0.60516106 0.8542669802
## [107,] 0.466478825 0.9306448875 0.703875294 0.03845745 0.7759074960
## [108,] 0.800379856 0.3520184215 0.267456817 0.48110333 0.8685758680
## [109,] 0.151755133 0.2936923867 0.621256201 0.14185311 0.3776210353
## [110,] 0.287878932 0.1294069889 0.670613538 0.94751308 0.0611062546
## [111,] 0.832147317 0.6827595592 0.538644945 0.87725762 0.0213962093
## [112,] 0.356565348 0.6966314232 0.650606939 0.94441839 0.4798845255
## [113,] 0.152505924 0.2675740977 0.769657922 0.59627194 0.3957366589
## [114,] 0.961865200 0.8681412905 0.417123359 0.39263766 0.5986055222
## [115,] 0.896964582 0.8410316706 0.679399607 0.73320922 0.1398790102
## [116,] 0.204163874 0.4206810493 0.438550440 0.78816967 0.4613876510
## [117,] 0.497343759 0.2990697299 0.386641323 0.09119469 0.1647713403
## [118,] 0.489158380 0.3512176229 0.620177293 0.43710558 0.0575959291
## [119,] 0.891419917 0.0752101669 0.437335404 0.60789275 0.1817525383
## [120,] 0.834367346 0.1640163781 0.017495631 0.36233936 0.5946028228
## [121,] 0.152151143 0.0654842497 0.914706602 0.82691729 0.8866709257
## [122,] 0.077801950 0.0864140247 0.303029720 0.98498263 0.8551978797
## [123,] 0.919698349 0.0244409135 0.842882600 0.21159825 0.4052810655
## [124,] 0.101779516 0.3098834711 0.354098768 0.20239202 0.7454694749
## [125,] 0.095242802 0.6392834603 0.173609184 0.32176229 0.0203380659
## [126,] 0.193841641 0.9907928167 0.260202341 0.07933449 0.9918988976
## [127,] 0.036823593 0.8630254937 0.971392875 0.72657064 0.8669382969
## [128,] 0.561085408 0.5625008566 0.526969026 0.44022622 0.3591023455
## [129,] 0.234392816 0.3565410944 0.417501493 0.68574860 0.4071471419
## [130,] 0.981812039 0.1244116481 0.508118408 0.18056374 0.3507680686
## [131,] 0.619754707 0.7228571586 0.663559444 0.86207013 0.6430875901
## [132,] 0.813506609 0.2743458373 0.917629394 0.99675280 0.4368257870
## [133,] 0.800164898 0.0441239236 0.424362444 0.11662314 0.9388706437
## [134,] 0.899665130 0.7393188539 0.386799271 0.82520710 0.1900425423
## [135,] 0.752030377 0.8582679760 0.225018195 0.21361169 0.0410592030
## [136,] 0.861150882 0.4040625703 0.338629154 0.60826687 0.2062064887
## [137,] 0.748720194 0.7244013348 0.437859140 0.97519620 0.8481824000
## [138,] 0.876728287 0.7754062298 0.718574522 0.82742769 0.7321053317
## [139,] 0.315764737 0.6592402020 0.912133243 0.09921118 0.0867252313
## [140,] 0.334902205 0.8549364691 0.228035676 0.26189231 0.1372012722
## [141,] 0.691769472 0.4665126281 0.342140663 0.10310695 0.6957101361
## [142,] 0.799386723 0.9737342405 0.659086201 0.38998818 0.0284510481
## [143,] 0.211814563 0.2094773888 0.335905776 0.89576178 0.8490476662
## [144,] 0.863388704 0.8051897408 0.698793563 0.39294279 0.6397110389
## [145,] 0.299014319 0.2322728748 0.821760304 0.28795748 0.0173770604
## [146,] 0.030932782 0.1437713674 0.799826668 0.44538699 0.8895165317
## [147,] 0.991173455 0.9414315266 0.880508907 0.68800980 0.1604651851
## [148,] 0.657293510 0.9838931826 0.631385729 0.24886320 0.7518763188
## [149,] 0.336038610 0.9177695345 0.050762983 0.73806679 0.2649184824
## [150,] 0.236501853 0.3803842708 0.309234829 0.45785144 0.0502900621
## [151,] 0.136846762 0.4889345157 0.591108880 0.15379838 0.9748671581
## [152,] 0.979022667 0.5690807805 0.075898709 0.99450185 0.7284732007
## [153,] 0.475691647 0.9337502327 0.227345680 0.43513026 0.5342239176
## [154,] 0.886751693 0.2733569711 0.292474349 0.59335793 0.4918541790
## [155,] 0.063279937 0.4328826687 0.032418815 0.08293577 0.7858178117
## [156,] 0.732777831 0.4274693204 0.254223770 0.14317763 0.2494044087
## [157,] 0.078506095 0.7922170395 0.231370079 0.77365803 0.5228424673
## [158,] 0.605542770 0.8337273479 0.889497988 0.50332062 0.8549551300
## [159,] 0.768770688 0.9165921314 0.455000776 0.06770817 0.5492767242
## [160,] 0.555850352 0.7900542824 0.498564781 0.24806999 0.4670313271
## [161,] 0.649365293 0.9968941640 0.321817087 0.14574780 0.4581518150
## [162,] 0.550964889 0.7695331858 0.251298605 0.22205128 0.5368936798
## [163,] 0.899306411 0.6929694430 0.810302248 0.44825475 0.2729434187
## [164,] 0.585709204 0.6273939707 0.371526687 0.73741117 0.3611565675
## [165,] 0.951978466 0.4604132448 0.391545556 0.30836776 0.5027814964
## [166,] 0.136618037 0.9574343008 0.702811488 0.54891173 0.8238061103
## [167,] 0.628570701 0.3267497604 0.080907904 0.56980389 0.0237474067
## [168,] 0.157044585 0.5545889824 0.486731739 0.59465489 0.5132933659
## [169,] 0.636425205 0.4988269992 0.827594580 0.94759290 0.8925688071
## [170,] 0.480861684 0.5119195797 0.486508619 0.68364140 0.0692791494
## [171,] 0.832144199 0.3094669573 0.444606682 0.24248652 0.5391821628
## [172,] 0.771588660 0.2776848068 0.644553584 0.96615702 0.5994494248
## [173,] 0.324167282 0.4779637468 0.290042932 0.98496498 0.9369633584
## [174,] 0.659356948 0.3893800799 0.878292457 0.27983718 0.3053008642
## [175,] 0.532519924 0.8680363030 0.172444648 0.64148028 0.2684861100
## [176,] 0.204974036 0.8322339493 0.077403517 0.22397140 0.5062699006
## [177,] 0.323129719 0.5497837078 0.983574699 0.18919426 0.4738134169
## [178,] 0.264670789 0.4978049984 0.958653315 0.72888228 0.6640703841
## [179,] 0.678093939 0.1472444662 0.340870938 0.29937942 0.8458849152
## [180,] 0.081928143 0.5700224715 0.945648151 0.26450984 0.2608172679
## [181,] 0.530174532 0.7193907879 0.138842501 0.47844784 0.9947047532
## [182,] 0.874421860 0.0891601404 0.347074623 0.89780599 0.4196873165
## [183,] 0.122928002 0.2104280407 0.096300739 0.70509625 0.2406842108
## [184,] 0.228643302 0.5566141156 0.093478653 0.27288204 0.5062748673
## [185,] 0.740861977 0.9196657415 0.022499489 0.35668120 0.5199868476
## [186,] 0.220109959 0.0451193023 0.244784819 0.35164977 0.5164993696
## [187,] 0.955411533 0.1815741358 0.423669373 0.77230389 0.4722417286
## [188,] 0.926571538 0.5692105074 0.380876869 0.61001785 0.4054445613
## [189,] 0.021300168 0.4407202841 0.881928453 0.25829609 0.7374271080
## [190,] 0.984092961 0.2713171216 0.571687629 0.80697581 0.4740197924
## [191,] 0.742446133 0.9452833366 0.476221022 0.66289733 0.9301175450
## [192,] 0.751733591 0.5562654356 0.232575751 0.19756353 0.3109094498
## [193,] 0.766611766 0.5414471820 0.372332766 0.78676807 0.7206510790
## [194,] 0.157572611 0.4247918581 0.871836315 0.59670492 0.4885888107
## [195,] 0.437478758 0.1763298109 0.573813990 0.89066780 0.9150138672
## [196,] 0.578849450 0.3362799759 0.354771375 0.84511931 0.7281304575
## [197,] 0.526632831 0.4579694362 0.018996936 0.29782793 0.4220163056
## [198,] 0.143641228 0.9007345615 0.894480049 0.09801461 0.7133818276
## [199,] 0.761935409 0.9532042502 0.358156066 0.31161534 0.2381420690
## [200,] 0.812187465 0.0481019330 0.825925712 0.38218165 0.1195313639
## [,6] [,7] [,8] [,9] [,10]
## [1,] 0.6292917978 0.38458588 0.1215394123 0.864269432 0.8520212951
## [2,] 0.8067407543 0.26742497 0.2821895389 0.429695541 0.3037606850
## [3,] 0.2455792942 0.41526178 0.1356330144 0.384571484 0.7805942637
## [4,] 0.1851186294 0.66415989 0.9207540751 0.337606930 0.1004382896
## [5,] 0.2662024347 0.45752316 0.1528321323 0.891014373 0.5831266025
## [6,] 0.2484438769 0.90937421 0.6525453520 0.077709112 0.3774153506
## [7,] 0.1139205289 0.39894414 0.9487702441 0.250756228 0.3364991476
## [8,] 0.6222261169 0.86562914 0.5987436329 0.639540625 0.1951235996
## [9,] 0.4307985029 0.71372525 0.1362698006 0.096666414 0.1876214633
## [10,] 0.2587662986 0.58276392 0.1785566786 0.252152989 0.3784964588
## [11,] 0.4133484359 0.73007440 0.5512193858 0.852327782 0.3907520983
## [12,] 0.1249177314 0.13465800 0.6303301824 0.809050875 0.3534236783
## [13,] 0.4749329579 0.86276763 0.9031925818 0.326213229 0.8705161638
## [14,] 0.9830588233 0.22569001 0.7847172678 0.713712744 0.9621141220
## [15,] 0.0152999223 0.42319783 0.3034593344 0.739910517 0.4003052474
## [16,] 0.0058389865 0.82850201 0.2113810801 0.214451857 0.0462677414
## [17,] 0.7906984100 0.42119492 0.6612731428 0.476091998 0.2815749862
## [18,] 0.3897138829 0.40086436 0.8795136325 0.495412341 0.8192956529
## [19,] 0.3019062057 0.19780485 0.1277885674 0.847265005 0.2406868988
## [20,] 0.5309960167 0.88392626 0.9083913798 0.509214530 0.5653414675
## [21,] 0.6056312406 0.65295414 0.7852160940 0.146114688 0.4463825258
## [22,] 0.2861065315 0.32603377 0.2118697974 0.850583501 0.0993997748
## [23,] 0.8800012432 0.49433710 0.6651614700 0.284389477 0.5321122233
## [24,] 0.8994572905 0.75449945 0.1201711993 0.338501038 0.8208115268
## [25,] 0.2982519220 0.19808121 0.5893972670 0.947489619 0.7931440244
## [26,] 0.6280854188 0.72620965 0.3209858944 0.264557422 0.3476461943
## [27,] 0.1705386844 0.44818460 0.2638411776 0.106812025 0.5540756104
## [28,] 0.9581705360 0.77296929 0.6030892355 0.776811189 0.5965659379
## [29,] 0.8559326925 0.20416019 0.1970981476 0.776874400 0.2235677531
## [30,] 0.7429501656 0.60843414 0.1615384098 0.207277806 0.9246709698
## [31,] 0.8189896953 0.76965347 0.8903108700 0.197305448 0.0820506092
## [32,] 0.7933381393 0.70006249 0.5651295306 0.341435637 0.3397312078
## [33,] 0.9938175408 0.11584791 0.0444243609 0.580420954 0.9822044973
## [34,] 0.2321643862 0.10939190 0.3426901980 0.316033534 0.8644264699
## [35,] 0.4052331252 0.48931032 0.9510238429 0.470990010 0.8488984788
## [36,] 0.3944642670 0.25441935 0.5808517726 0.472057863 0.7625171999
## [37,] 0.3190778764 0.13313291 0.3383714182 0.197918270 0.4207189612
## [38,] 0.6561929260 0.97607036 0.2197752686 0.790798550 0.2790792978
## [39,] 0.2763341693 0.60072902 0.2818121715 0.797505846 0.8593643245
## [40,] 0.9457077389 0.27613346 0.5703233073 0.781235632 0.2139608837
## [41,] 0.5839659714 0.92888522 0.4085799651 0.856452388 0.2977794926
## [42,] 0.5371840096 0.60122986 0.3277163371 0.839028873 0.5526694821
## [43,] 0.9342741899 0.06983238 0.5517858458 0.382782364 0.3208722172
## [44,] 0.7415342911 0.31637070 0.4417527216 0.775551144 0.2935530862
## [45,] 0.3203892512 0.81034121 0.1281705804 0.007499525 0.5957063211
## [46,] 0.7310555065 0.27281509 0.1185793274 0.783310859 0.4743276807
## [47,] 0.2507446394 0.07623093 0.2248757505 0.521334800 0.0457513840
## [48,] 0.7644736234 0.06835995 0.3871545675 0.037133610 0.8177816232
## [49,] 0.1499585507 0.35943540 0.3089316825 0.901292979 0.0487910195
## [50,] 0.1278368144 0.60852671 0.9890616357 0.044676271 0.4115151099
## [51,] 0.4840969816 0.24382305 0.8892481686 0.394964202 0.2020597518
## [52,] 0.6010938922 0.76341807 0.1319081993 0.747851586 0.2469093541
## [53,] 0.0406050123 0.30278637 0.2874643628 0.483065483 0.2264301430
## [54,] 0.8139282970 0.71914793 0.3203937295 0.863601431 0.1933400186
## [55,] 0.8225189694 0.28222055 0.5951168248 0.295982166 0.8215486112
## [56,] 0.1226780200 0.48243126 0.1391413538 0.002799573 0.0009167292
## [57,] 0.5884957830 0.20631121 0.4117878580 0.204133132 0.1843188568
## [58,] 0.6131424692 0.57724456 0.9255893512 0.710483129 0.5590017978
## [59,] 0.8876600771 0.93315959 0.3945814767 0.421603987 0.7708637107
## [60,] 0.0173959178 0.21897263 0.2951378846 0.600619329 0.2378141689
## [61,] 0.0001087531 0.13670639 0.1460415956 0.256730017 0.1209285269
## [62,] 0.2284741248 0.75268374 0.9080629004 0.943129062 0.0775409674
## [63,] 0.9445946869 0.61585773 0.1353869836 0.799761379 0.0604876340
## [64,] 0.6698846808 0.02610746 0.9493165135 0.347090631 0.3237658013
## [65,] 0.0691140748 0.51413923 0.7394067033 0.627828851 0.2191394214
## [66,] 0.3582457737 0.46968387 0.9766390792 0.781364260 0.2309241635
## [67,] 0.3900472713 0.53143557 0.1438009043 0.083109800 0.9923764516
## [68,] 0.4924089559 0.85523019 0.8210457389 0.605907152 0.4268372264
## [69,] 0.2362820508 0.25945670 0.8207873120 0.489545648 0.2142948690
## [70,] 0.1655986032 0.19534139 0.1353848879 0.883279711 0.7543655634
## [71,] 0.8489977010 0.90714720 0.0004494507 0.135185259 0.5535748431
## [72,] 0.9298924080 0.52684461 0.6057003518 0.269645364 0.7806452233
## [73,] 0.6323839023 0.67599354 0.4532112607 0.907633202 0.8646424545
## [74,] 0.4426674470 0.52112814 0.9722401747 0.068698060 0.5461974926
## [75,] 0.8672543929 0.95864471 0.9285342940 0.326077993 0.2602767814
## [76,] 0.0981771953 0.27644111 0.6413498351 0.588181271 0.4998908893
## [77,] 0.5370299404 0.25553320 0.3444347403 0.175188267 0.5974880976
## [78,] 0.9120113957 0.03912957 0.4787233479 0.370095088 0.4011987778
## [79,] 0.3656011589 0.24572375 0.3336354871 0.143957757 0.9490070925
## [80,] 0.5942407069 0.90179290 0.7731464931 0.180479118 0.1894912110
## [81,] 0.0458112657 0.72364741 0.7408202901 0.808505162 0.4205632056
## [82,] 0.4076461145 0.74053887 0.4162067166 0.090692795 0.6745304288
## [83,] 0.5684702920 0.03992078 0.3234011929 0.806153088 0.0186410726
## [84,] 0.5845715150 0.66204925 0.7005104905 0.296913448 0.7714271788
## [85,] 0.4803088880 0.57181323 0.9904097477 0.934821429 0.0386857938
## [86,] 0.9427057062 0.65202006 0.6739595360 0.702518622 0.1633884166
## [87,] 0.7167628531 0.04682515 0.5113688556 0.387826467 0.2768094314
## [88,] 0.9727265255 0.02480272 0.4685928104 0.502977556 0.7240222553
## [89,] 0.4241232842 0.70005983 0.5800409992 0.842819298 0.9613207267
## [90,] 0.9622193279 0.32408030 0.6369550761 0.024918557 0.4018625957
## [91,] 0.7789402676 0.08382532 0.3823109691 0.660598623 0.6121315351
## [92,] 0.0077233068 0.84694345 0.9828596576 0.092243286 0.9376979368
## [93,] 0.3927995169 0.65142853 0.3314361467 0.931713114 0.0445185276
## [94,] 0.6202455116 0.47953029 0.2267527971 0.980113736 0.4092375883
## [95,] 0.2697588850 0.56703962 0.5727488748 0.724345015 0.0971712412
## [96,] 0.8579049830 0.71129579 0.2273991520 0.379008924 0.6505298188
## [97,] 0.9105288459 0.67845176 0.1196764805 0.147194198 0.7934118740
## [98,] 0.5231846510 0.73907259 0.5573922247 0.480131395 0.9515481780
## [99,] 0.1603342602 0.62244605 0.3415684910 0.925197898 0.3788565577
## [100,] 0.7121645662 0.13004444 0.5372014712 0.404318234 0.1167977806
## [101,] 0.7629839233 0.73690795 0.4928699178 0.373021704 0.5358814374
## [102,] 0.0436173580 0.41739097 0.4754432756 0.631007765 0.0907530447
## [103,] 0.1690742334 0.96602172 0.9376587567 0.235497323 0.9331555758
## [104,] 0.9590265390 0.14411046 0.3392822377 0.123860870 0.3512217533
## [105,] 0.8985128088 0.93272246 0.5179623081 0.613360144 0.6678639553
## [106,] 0.2393613579 0.45002693 0.4175546006 0.182090362 0.7875546489
## [107,] 0.2028899491 0.23551350 0.4567197601 0.878322096 0.7914687973
## [108,] 0.9057147247 0.44899586 0.7900478840 0.520401004 0.3534726845
## [109,] 0.4975222235 0.67995324 0.7130455736 0.138480710 0.8146341147
## [110,] 0.4817214396 0.07030963 0.3071334406 0.400901738 0.7381759854
## [111,] 0.3141533069 0.09432316 0.6544006907 0.932612633 0.6307414989
## [112,] 0.4808175629 0.16937521 0.6129573258 0.744659926 0.9737533082
## [113,] 0.1111585461 0.07468505 0.6018439184 0.295497803 0.1249790599
## [114,] 0.4128927670 0.57599404 0.4855924896 0.327204324 0.5983377167
## [115,] 0.2137827124 0.20419832 0.6311057317 0.073009515 0.6922947068
## [116,] 0.1968133678 0.53183518 0.6608771768 0.203285026 0.9385663311
## [117,] 0.2050535034 0.97051512 0.2776747684 0.266543807 0.9664240617
## [118,] 0.6433614648 0.36087194 0.6963796993 0.918834622 0.3531340752
## [119,] 0.2367572135 0.17272380 0.1525280941 0.158405269 0.4478921206
## [120,] 0.8177070459 0.06781344 0.8534685026 0.882070255 0.8414704329
## [121,] 0.1466877575 0.60666854 0.0483842457 0.926833061 0.5968029357
## [122,] 0.4414088081 0.84723903 0.1771546872 0.431100848 0.1715827284
## [123,] 0.4670426873 0.03966405 0.7741287006 0.558894102 0.8676836449
## [124,] 0.0300916485 0.23343833 0.5230083202 0.618525907 0.9605299190
## [125,] 0.0392151230 0.79802973 0.4341217838 0.166037336 0.0593102747
## [126,] 0.9001475428 0.65451824 0.4522303506 0.688689560 0.3675610358
## [127,] 0.8362018380 0.50116233 0.6312501621 0.175361515 0.5925760376
## [128,] 0.8470265758 0.71253790 0.5891603490 0.975419685 0.3652717185
## [129,] 0.2678647079 0.10342315 0.3647346389 0.909009500 0.0297757376
## [130,] 0.9587960506 0.23518291 0.2019062326 0.532241529 0.7157959212
## [131,] 0.4600022698 0.73022207 0.1131236509 0.414374121 0.8094666060
## [132,] 0.5273683004 0.82198578 0.7729429379 0.568545850 0.1092382681
## [133,] 0.0440144374 0.90363424 0.9037799716 0.546293053 0.4159424896
## [134,] 0.4679640853 0.26353922 0.3373544100 0.349183248 0.8141559155
## [135,] 0.1414063205 0.99664356 0.7913745835 0.973997945 0.9028601535
## [136,] 0.5748593335 0.44375008 0.8216883291 0.188460874 0.6510244585
## [137,] 0.6756726061 0.07939840 0.8266744569 0.776164617 0.2154749332
## [138,] 0.9145728964 0.30350018 0.9608915544 0.475865776 0.4963175575
## [139,] 0.0715240112 0.71583105 0.0918946960 0.618039205 0.0715860131
## [140,] 0.8157287198 0.85875703 0.9928176054 0.531584403 0.8486959743
## [141,] 0.6820472272 0.41313318 0.6970799451 0.854936944 0.2782127012
## [142,] 0.3710516170 0.99118045 0.9396346337 0.723519275 0.6330758149
## [143,] 0.2674262021 0.79656099 0.6637258457 0.666746285 0.4912632110
## [144,] 0.2843670391 0.84887307 0.6303967142 0.316690173 0.8878658856
## [145,] 0.7137018254 0.58899192 0.5305541467 0.092495579 0.1832001559
## [146,] 0.9690435503 0.53360575 0.6721252596 0.349298890 0.8426887870
## [147,] 0.6294289615 0.44358872 0.5794394487 0.639810576 0.6580889346
## [148,] 0.5929452933 0.89499783 0.5835341320 0.450882327 0.8946816886
## [149,] 0.4446317928 0.38097749 0.9320464786 0.281816156 0.2838615119
## [150,] 0.3829061901 0.64052555 0.4812416276 0.878896176 0.9525804336
## [151,] 0.5733397978 0.46992891 0.1315456252 0.012754505 0.5067054278
## [152,] 0.4974252419 0.08763451 0.8467942206 0.751026510 0.3103524903
## [153,] 0.6742728173 0.49986488 0.9724769394 0.721768338 0.7699732522
## [154,] 0.6381821784 0.24028844 0.1936245409 0.720172594 0.0117464245
## [155,] 0.6147778442 0.22796039 0.3310023751 0.344389915 0.7022324987
## [156,] 0.4569275004 0.53049110 0.1396596800 0.610393548 0.9031030985
## [157,] 0.9736672535 0.45604827 0.5487407392 0.915319204 0.4894009400
## [158,] 0.2558556222 0.65409965 0.3647755324 0.065318733 0.8854222724
## [159,] 0.1219532513 0.32040507 0.7201853811 0.619322241 0.9727461447
## [160,] 0.8945050284 0.69859390 0.2394242082 0.246419065 0.4188268753
## [161,] 0.6760949122 0.33334220 0.6375201580 0.774997413 0.1318161637
## [162,] 0.8123158223 0.73111692 0.5000773973 0.367119963 0.9501041383
## [163,] 0.9259343066 0.23292368 0.9271361146 0.259555413 0.1426221761
## [164,] 0.9463072179 0.10379342 0.0331551160 0.028804634 0.5290053771
## [165,] 0.0201394730 0.23463693 0.1906394106 0.827841051 0.5603175892
## [166,] 0.6214252820 0.02997492 0.5058142536 0.616176529 0.6468932487
## [167,] 0.8151300617 0.90280720 0.5048924852 0.267728540 0.8466150395
## [168,] 0.0624606325 0.65355830 0.3523419020 0.673359978 0.1142218527
## [169,] 0.5977388639 0.40368400 0.8436545779 0.063083657 0.2129762396
## [170,] 0.7790744281 0.08589249 0.2681266193 0.380194020 0.9251562965
## [171,] 0.2827600357 0.49922343 0.6604047730 0.557247905 0.7277810839
## [172,] 0.5657137039 0.46026435 0.1205760995 0.056712000 0.8423992041
## [173,] 0.0542521959 0.92112180 0.2011775763 0.486514710 0.1401124126
## [174,] 0.0494483374 0.09807712 0.8047545305 0.016581599 0.5022441898
## [175,] 0.8501643089 0.07913134 0.7318138883 0.225057076 0.6948785074
## [176,] 0.6808816905 0.81887291 0.6250573434 0.534719604 0.7040045997
## [177,] 0.2547951911 0.63098973 0.8159708260 0.747319367 0.0589791671
## [178,] 0.9730533052 0.20021229 0.0775967727 0.716420032 0.4387958352
## [179,] 0.3491200847 0.63718297 0.9540340698 0.551741528 0.2457818224
## [180,] 0.7052616698 0.57138244 0.3340067817 0.998767941 0.0806682527
## [181,] 0.6870498080 0.15034549 0.4216069544 0.478697751 0.8349657920
## [182,] 0.9187876759 0.53865909 0.5313924984 0.537569566 0.7393872361
## [183,] 0.2217529775 0.16521848 0.4921393818 0.506609968 0.6385428559
## [184,] 0.2160414790 0.92910816 0.5632665888 0.960346226 0.8556066481
## [185,] 0.1948232700 0.18188819 0.2771232650 0.266835724 0.3000570547
## [186,] 0.8213695439 0.33460671 0.3775252469 0.682309793 0.8787130597
## [187,] 0.1480820312 0.66002950 0.7154957717 0.667719918 0.2787885005
## [188,] 0.7318276525 0.66536613 0.2819111748 0.841020002 0.8908672689
## [189,] 0.7898324579 0.93596142 0.3500172223 0.698694721 0.3091367013
## [190,] 0.1322375010 0.37564695 0.2178489063 0.757056098 0.5178186349
## [191,] 0.5834364993 0.32413921 0.6433858224 0.211357603 0.8302574239
## [192,] 0.5086603377 0.11196433 0.6203710833 0.879960810 0.6639030878
## [193,] 0.0988659069 0.68749795 0.7467523525 0.135246728 0.0620522315
## [194,] 0.8672402804 0.81558484 0.1257193224 0.312839136 0.2386996273
## [195,] 0.7658389821 0.69169510 0.5547427684 0.825970825 0.9783165071
## [196,] 0.2893580073 0.50272244 0.8046899692 0.933460313 0.4707282805
## [197,] 0.6529622299 0.10228810 0.1345007797 0.514091750 0.4843453881
## [198,] 0.9643053063 0.82615345 0.5867460433 0.660320126 0.8545839591
## [199,] 0.8453104680 0.29142220 0.7525011031 0.685744675 0.0941802256
## [200,] 0.1948899792 0.12765845 0.3744734686 0.714494400 0.2144597324
trainingData$x <- data.frame(trainingData$x)
featurePlot(trainingData$x, trainingData$y)testData <- mlbench.friedman1(5000, sd = 1)
testData$x <- data.frame(testData$x)
library(caret)
knnModel <- train(x = trainingData$x,y = trainingData$y,method = "knn",preProc = c("center", "scale"),tuneLength = 10)
knnModel## k-Nearest Neighbors
##
## 200 samples
## 10 predictor
##
## Pre-processing: centered (10), scaled (10)
## Resampling: Bootstrapped (25 reps)
## Summary of sample sizes: 200, 200, 200, 200, 200, 200, ...
## Resampling results across tuning parameters:
##
## k RMSE Rsquared MAE
## 5 3.810384 0.4095155 3.061924
## 7 3.735713 0.4407487 3.009587
## 9 3.680638 0.4748781 2.997298
## 11 3.655899 0.4978274 2.958415
## 13 3.646095 0.5150858 2.956582
## 15 3.645606 0.5290493 2.948794
## 17 3.676208 0.5292479 2.964985
## 19 3.693689 0.5373618 2.974160
## 21 3.715470 0.5416312 2.987362
## 23 3.747162 0.5371715 3.016582
##
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was k = 15.
knnPred <- predict(knnModel, newdata = testData$x)
postResample(pred = knnPred, obs = testData$y)## RMSE Rsquared MAE
## 3.3980407 0.6655215 2.7431634
library(earth)## Loading required package: Formula
## Loading required package: plotmo
## Loading required package: plotrix
## Loading required package: TeachingDemos
library(kernlab)##
## Attaching package: 'kernlab'
## The following object is masked from 'package:ggplot2':
##
## alpha
set.seed(34)
earthGrid <- expand.grid(.degree = 1:2,.nprune = 2:38)
earthModel <- train(trainingData$x,trainingData$y,method = "earth",tuneGrid = expand.grid(.degree = 1,.nprune = 2:25),trControl = trainControl(method = "cv"),preProcess = c("center","scale"))
set.seed(34)
svmRModel <- train(trainingData$x,trainingData$y,method = "svmRadial",tuneLength = 15,trControl = trainControl(method = "cv"),preProcess = c("center","scale"))
nnetGrid <- expand.grid(size = seq(1, 10),decay = c(0,.01,.1),bag = FALSE)
set.seed(34)
nnetModel <- train(trainingData$x,trainingData$y,method = "avNNet",tuneGrid = nnetGrid,preProc = c("center", "scale"),linout = TRUE,trace = FALSE,maxit = 1000,MaxNWts = 10 * (ncol(trainingData$x) + 1) + 10 + 1,trControl = trainControl(method = "cv"),preProcess = c("center","scale"))earthPred <- predict(earthModel, newdata = testData$x)
svmPred <- predict(svmRModel, newdata = testData$x)
nnetPred <- predict(nnetModel,newdata = testData$x)
model_summary <- rbind("KNN" = postResample(pred = knnPred, obs = testData$y),
"MARS" = postResample(pred = earthPred, obs = testData$y),
"SVM" = postResample(pred = svmPred, obs = testData$y),
"NNET" = postResample(pred = nnetPred, obs = testData$y))
model_summary## RMSE Rsquared MAE
## KNN 3.398041 0.6655215 2.743163
## MARS 1.856183 0.8621016 1.451647
## SVM 1.980424 0.8435895 1.540738
## NNET 2.111891 0.8245246 1.684878
varImp(knnModel)## loess r-squared variable importance
##
## Overall
## X4 100.0000
## X2 67.0138
## X1 62.3539
## X5 31.1975
## X3 25.2358
## X8 5.2805
## X9 3.2737
## X6 1.5019
## X10 0.7223
## X7 0.0000
varImp(earthModel)## earth variable importance
##
## Overall
## X4 100.00
## X2 69.57
## X1 37.68
## X5 12.28
## X3 0.00
varImp(svmRModel)## loess r-squared variable importance
##
## Overall
## X4 100.0000
## X2 67.0138
## X1 62.3539
## X5 31.1975
## X3 25.2358
## X8 5.2805
## X9 3.2737
## X6 1.5019
## X10 0.7223
## X7 0.0000
varImp(nnetModel)## loess r-squared variable importance
##
## Overall
## X4 100.0000
## X2 67.0138
## X1 62.3539
## X5 31.1975
## X3 25.2358
## X8 5.2805
## X9 3.2737
## X6 1.5019
## X10 0.7223
## X7 0.0000
The MARS Model yielded the best R^2 and RMSE values of the four models, while the Support Vector Machine and Neural Network models were also very productive.
All four models rank the variable importance (top 5) X4,X2,X1,X5,X3.
7.5
data(ChemicalManufacturingProcess)#summary(ChemicalManufacturingProcess)
gaps <- preProcess(ChemicalManufacturingProcess,method = "bagImpute")
ChemicalManufacturingProcess_cleaned <- predict(gaps,ChemicalManufacturingProcess)
#w3summary(ChemicalManufacturingProcess_cleaned)
cmp_c <- ChemicalManufacturingProcess_cleaned6.3 Models:
set.seed(34)
train_idx <- sample(c(TRUE,FALSE), nrow(cmp_c),
replace=TRUE, prob=c(0.7,0.3))
train_set <- cmp_c[train_idx,]
test_set <- cmp_c[!train_idx,]
train_yield <- data.frame(cmp_c[train_idx,1])
test_yield <- data.frame(cmp_c[!train_idx,1])
colnames(train_yield) <- c('Yield')
colnames(test_yield) <- c('Yield')
fit <- train(Yield ~.,train_set, method = "pls",tunelength = 25, trControl = trainControl(method = "cv"))
plot(fit)fit## Partial Least Squares
##
## 115 samples
## 57 predictor
##
## No pre-processing
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 104, 103, 103, 103, 104, 104, ...
## Resampling results across tuning parameters:
##
## ncomp RMSE Rsquared MAE
## 1 1.649621 0.2089658 1.351095
## 2 1.620623 0.2560571 1.313245
## 3 1.643336 0.2316565 1.333116
##
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was ncomp = 2.
fit2 <- train(Yield ~.,train_set, method = "pls",metric = "Rsquared",tunelength = 25, trControl = trainControl(method = "cv"),preProcess = c("center","scale"))
plot(fit2)fit2## Partial Least Squares
##
## 115 samples
## 57 predictor
##
## Pre-processing: centered (57), scaled (57)
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 103, 105, 104, 103, 104, 104, ...
## Resampling results across tuning parameters:
##
## ncomp RMSE Rsquared MAE
## 1 1.362092 0.4312490 1.119568
## 2 1.268019 0.5272961 1.022282
## 3 1.207426 0.5663559 0.988988
##
## Rsquared was used to select the optimal model using the largest value.
## The final value used for the model was ncomp = 3.
Results from PLS, LARS and Enet Models:
r2_eval <- data.frame(0.305,0.6699,0.6699)
colnames(r2_eval) <- c("PLS","LARS Model","enetTune Model")
r2_train <- data.frame(0.573,0.561,0.578)
colnames(r2_train) <- c("PLS","LARS Model","enetTune Model")
r2_eval <- rbind(r2_eval,r2_train)
colnames(r2_eval) <- c("PLS","LARS Model","enetTune Model")
rownames(r2_eval) <- c("R-Squared: Actual","R-Squared: Trained")
kbl(r2_eval, longtable = T, booktabs = T, caption = "Model Performance Summary") %>%
kable_styling(latex_options = c("repeat_header"))| PLS | LARS Model | enetTune Model | |
|---|---|---|---|
| R-Squared: Actual | 0.305 | 0.6699 | 0.6699 |
| R-Squared: Trained | 0.573 | 0.5610 | 0.5780 |
set.seed(34)
fit3 <- train(Yield ~.,train_set, method = "lars",metric = "Rsquared",tuneLength = 20, trControl = trainControl(method = "cv"),preProc = c("center","scale"))
#fit3
plot(fit3)fit3_predict <- predict(fit3,test_set[,-1])
postResample(fit3_predict,test_set[,1])## RMSE Rsquared MAE
## 1.2282224 0.6154882 0.9188517
varImp(fit3)## loess r-squared variable importance
##
## only 20 most important variables shown (out of 57)
##
## Overall
## ManufacturingProcess32 100.00
## ManufacturingProcess13 69.87
## BiologicalMaterial06 58.29
## ManufacturingProcess17 53.74
## ManufacturingProcess36 53.69
## ManufacturingProcess09 51.94
## BiologicalMaterial03 50.78
## BiologicalMaterial02 43.62
## ManufacturingProcess31 42.24
## ManufacturingProcess27 40.03
## ManufacturingProcess20 39.46
## BiologicalMaterial01 36.25
## BiologicalMaterial12 34.52
## ManufacturingProcess29 33.40
## ManufacturingProcess06 32.82
## ManufacturingProcess33 31.19
## ManufacturingProcess12 28.84
## BiologicalMaterial04 26.79
## ManufacturingProcess02 24.80
## BiologicalMaterial09 22.41
train_set_data <- train_set[,-c(1)]
test_set_data <- test_set[,-c(1)]
knnModel <- train(x = train_set_data,y = train_yield$Yield,method = "knn",preProc = c("center", "scale"),tuneLength = 10)
knnModel## k-Nearest Neighbors
##
## 115 samples
## 57 predictor
##
## Pre-processing: centered (57), scaled (57)
## Resampling: Bootstrapped (25 reps)
## Summary of sample sizes: 115, 115, 115, 115, 115, 115, ...
## Resampling results across tuning parameters:
##
## k RMSE Rsquared MAE
## 5 1.438517 0.3342477 1.129490
## 7 1.414401 0.3454644 1.124603
## 9 1.403646 0.3502420 1.128724
## 11 1.415587 0.3372328 1.142322
## 13 1.428026 0.3284130 1.153898
## 15 1.434032 0.3267271 1.163902
## 17 1.438808 0.3231208 1.169967
## 19 1.444505 0.3217963 1.177725
## 21 1.453591 0.3139859 1.184738
## 23 1.468961 0.3019866 1.197597
##
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was k = 9.
knnPred <- predict(knnModel, newdata = test_set_data)
postResample(pred = knnPred, obs = test_yield$Yield)## RMSE Rsquared MAE
## 1.4916810 0.4423442 1.1671512
set.seed(34)
earthGrid <- expand.grid(.degree = 1:2,.nprune = 2:38)
earthModel <- train(x = train_set_data,y = train_yield$Yield,method = "earth",tuneGrid = expand.grid(.degree = 1,.nprune = 2:25),trControl = trainControl(method = "cv"),preProcess = c("center","scale"))
set.seed(34)
svmRModel <- train(x = train_set_data,y = train_yield$Yield,method = "svmRadial",tuneLength = 15,trControl = trainControl(method = "cv"),preProcess = c("center","scale"))
nnetGrid <- expand.grid(size = seq(1, 10),decay = c(0,.01,.1),bag = FALSE)
set.seed(34)
nnetModel <- train(x = train_set_data,y = train_yield$Yield,method = "avNNet",tuneGrid = nnetGrid,preProc = c("center", "scale"),linout = TRUE,trace = FALSE,maxit = 1000,MaxNWts = 10 * (ncol(trainingData$x) + 1) + 10 + 1,trControl = trainControl(method = "cv"),preProcess = c("center","scale"))earthPred <- predict(earthModel, newdata = test_set_data)
svmPred <- predict(svmRModel, newdata = test_set_data)
nnetPred <- predict(nnetModel,newdata = test_set_data)
model_summary <- rbind("KNN" = postResample(pred = knnPred, obs = test_yield$Yield),
"MARS" = postResample(pred = earthPred, obs = test_yield$Yield),
"SVM" = postResample(pred = svmPred, obs = test_yield$Yield),
"NNET" = postResample(pred = nnetPred, obs = test_yield$Yield))
model_summary## RMSE Rsquared MAE
## KNN 1.491681 0.4423442 1.167151
## MARS 1.741594 0.3981110 1.126143
## SVM 1.448693 0.4763135 1.072114
## NNET 1.575532 0.4860350 1.207509
Analysis
Similar to the PLS, Enet, and LARS models, the nonlinear regression models were not particularly successful. The best \(R^2\) value was obtained with the SVM model, with an \(R^2\) of 0.470. These models alone could not be recommended. Below, I will reduce the models to only include the top 10 most important variables from the SVM model.
B
The top ten ranking of the variable importance of the SVM model is identical to that of the LARS model from the original linear models. The manufacturing processes dominate the list.
varImp(svmRModel)## loess r-squared variable importance
##
## only 20 most important variables shown (out of 57)
##
## Overall
## ManufacturingProcess32 100.00
## ManufacturingProcess13 69.87
## BiologicalMaterial06 58.29
## ManufacturingProcess17 53.74
## ManufacturingProcess36 53.69
## ManufacturingProcess09 51.94
## BiologicalMaterial03 50.78
## BiologicalMaterial02 43.62
## ManufacturingProcess31 42.24
## ManufacturingProcess27 40.03
## ManufacturingProcess20 39.46
## BiologicalMaterial01 36.25
## BiologicalMaterial12 34.52
## ManufacturingProcess29 33.40
## ManufacturingProcess06 32.82
## ManufacturingProcess33 31.19
## ManufacturingProcess12 28.84
## BiologicalMaterial04 26.79
## ManufacturingProcess02 24.80
## BiologicalMaterial09 22.41
train_set_new <- data.frame(cbind(train_set$ManufacturingProcess32,train_set$ManufacturingProcess13,train_set$BiologicalMaterial06,train_set$ManufacturingProcess17,train_set$ManufacturingProcess36,train_set$ManufacturingProcess09,train_set$BiologicalMaterial03,train_set$BiologicalMaterial02,train_set$ManufacturingProcess31,train_set$ManufacturingProcess27))
test_set_new <- data.frame(cbind(test_set$ManufacturingProcess32,test_set$ManufacturingProcess13,test_set$BiologicalMaterial06,test_set$ManufacturingProcess17,test_set$ManufacturingProcess36,test_set$ManufacturingProcess09,test_set$BiologicalMaterial03,test_set$BiologicalMaterial02,test_set$ManufacturingProcess31,test_set$ManufacturingProcess27))
colnames(train_set_new) <- c("ManufacturingProcess32","ManufacturingProcess13","BiologicalMaterial06","ManufacturingProcess17","ManufacturingProcess36","ManufacturingProcess09","BiologicalMaterial03","BiologicalMaterial02","ManufacturingProcess31","ManufacturingProcess27")
colnames(test_set_new) <- c("ManufacturingProcess32","ManufacturingProcess13","BiologicalMaterial06","ManufacturingProcess17","ManufacturingProcess36","ManufacturingProcess09","BiologicalMaterial03","BiologicalMaterial02","ManufacturingProcess31","ManufacturingProcess27")train_set_data <- train_set_new
test_set_data <- test_set_new
knnModel <- train(x = train_set_data,y = train_yield$Yield,method = "knn",preProc = c("center", "scale"),tuneLength = 10)
#knnModel
knnPred <- predict(knnModel, newdata = test_set_data)
postResample(pred = knnPred, obs = test_yield$Yield)## RMSE Rsquared MAE
## 1.336530 0.570262 1.053800
set.seed(34)
earthGrid <- expand.grid(.degree = 1:2,.nprune = 2:38)
earthModel <- train(x = train_set_data,y = train_yield$Yield,method = "earth",tuneGrid = expand.grid(.degree = 1,.nprune = 2:25),trControl = trainControl(method = "cv"),preProcess = c("center","scale"))
set.seed(34)
svmRModel <- train(x = train_set_data,y = train_yield$Yield,method = "svmRadial",tuneLength = 15,trControl = trainControl(method = "cv"),preProcess = c("center","scale"))
nnetGrid <- expand.grid(size = seq(1, 10),decay = c(0,.01,.1),bag = FALSE)
set.seed(34)
nnetModel <- train(x = train_set_data,y = train_yield$Yield,method = "avNNet",tuneGrid = nnetGrid,preProc = c("center", "scale"),linout = TRUE,trace = FALSE,maxit = 1000,MaxNWts = 10 * (ncol(trainingData$x) + 1) + 10 + 1,trControl = trainControl(method = "cv"),preProcess = c("center","scale"))earthPred <- predict(earthModel, newdata = test_set_data)
svmPred <- predict(svmRModel, newdata = test_set_data)
nnetPred <- predict(nnetModel,newdata = test_set_data)
model_summary <- rbind("KNN" = postResample(pred = knnPred, obs = test_yield$Yield),
"MARS" = postResample(pred = earthPred, obs = test_yield$Yield),
"SVM" = postResample(pred = svmPred, obs = test_yield$Yield),
"NNET" = postResample(pred = nnetPred, obs = test_yield$Yield))
model_summary## RMSE Rsquared MAE
## KNN 1.336530 0.5702620 1.0538003
## MARS 1.157222 0.6776290 0.9666953
## SVM 1.629982 0.3578805 1.1799044
## NNET 1.184638 0.6405817 0.9179897
After only selecting the top 10 most important variables from the SVM model, the performance of the MARS and NNet models improved to produce \(R^2\) values above 0.64.
featurePlot(train_set_data, train_yield$Yield)C
Most of the relationships between yield and the most important variables follow a linear pattern, whether positive or negative correlation. It seems that biological material is actually more likely to be positively correlated with yield than the various processes.
Some of the manufacturing processes are negatively correlated with yield, but the two most impactful processes are positively correlated with yield.
The nonlinear models provided a slightly poorer fit than the linear models, and none of the four nonlinear models can be recommended over the linear models from assignment 7.