★ represents a unique aspect of this method or indicates that the code requires special attention.
Data
This dataset is provided as a sample for demonstration purposes.
# Import data
dropout_data <- read.csv("dropout_data.csv")
View(dropout_data)
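View() opens the data in a spreadsheet-style viewer (for example in RStudio). As an optional sanity check that the import worked, the dimensions can also be printed to the console; this small addition is not part of the original workflow.
# Confirm the number of rows and columns (should be 1000 x 11)
dim(dropout_data)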
# Assign variable labels using attr()
attr(dropout_data$SES, "label") <- "Socioeconomic Status (1-100)"
attr(dropout_data$ParentEdu, "label") <- "Parental Education Level (1-5)"
attr(dropout_data$Attendance, "label") <- "Attendance Rate (percent)"
attr(dropout_data$HomeworkHours, "label") <- "Hours Spent on Homework (per week)"
attr(dropout_data$Motivation, "label") <- "Student Motivation (1-15)"
attr(dropout_data$PeerSupport, "label") <- "Peer Support (0-15)"
attr(dropout_data$TestAnxiety, "label") <- "Test Anxiety (0-15)"
attr(dropout_data$ExtraCurricular, "label") <- "Extracurricular Activities (number)"
attr(dropout_data$MathSelfEfficacy, "label") <- "Math Self-Efficacy (0-15)"
attr(dropout_data$FavoriteNumber, "label") <- "Favorite Number (Random Unrelated Variable)"
attr(dropout_data$Dropout, "label") <- "Dropout (1=Dropout; 0=NotDropout)"
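Labels stored this way can be read back with attr(). The short check below is an optional sketch, not part of the original code, that confirms the labels were attached.
# Retrieve a single label
attr(dropout_data$SES, "label")
# List the labels of all variables at once
sapply(dropout_data, function(x) attr(x, "label"))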
# Check the structure of variables in this dataset
str(dropout_data)
'data.frame': 1000 obs. of 11 variables:
$ SES : num 37.3 58.9 46.9 58.2 60.2 ...
..- attr(*, "label")= chr "Socioeconomic Status (1-100)"
$ ParentEdu : int 3 5 4 1 3 2 4 2 1 2 ...
..- attr(*, "label")= chr "Parental Education Level (1-5)"
$ Attendance : num 82 80.2 87.6 86.3 86.5 ...
..- attr(*, "label")= chr "Attendance Rate (percent)"
$ HomeworkHours : num 4.9 2.02 2.32 3.58 3.28 ...
..- attr(*, "label")= chr "Hours Spent on Homework (per week)"
$ Motivation : num 8.33 5.53 6.66 5.8 7.9 ...
..- attr(*, "label")= chr "Student Motivation (1-15)"
$ PeerSupport : num 7.05 7.79 8.35 6.36 11.73 ...
..- attr(*, "label")= chr "Peer Support (0-15)"
$ TestAnxiety : num 7.91 3.78 4.92 7.33 5.25 ...
..- attr(*, "label")= chr "Test Anxiety (0-15)"
$ ExtraCurricular : int 11 3 6 6 6 9 10 12 13 7 ...
..- attr(*, "label")= chr "Extracurricular Activities (number)"
$ MathSelfEfficacy: num 8.4 5.18 6.68 6.12 8.07 ...
..- attr(*, "label")= chr "Math Self-Efficacy (0-15)"
$ FavoriteNumber : int 687 907 893 198 529 896 23 343 563 712 ...
..- attr(*, "label")= chr "Favorite Number (Random Unrelated Variable)"
$ Dropout : int 0 0 0 0 0 0 0 0 0 0 ...
..- attr(*, "label")= chr "Dropout (1=Dropout; 0=NotDropout)"
Remove ‘FavoriteNumber’ Variable
# Load the dplyr package
library(dplyr)
Attaching package: 'dplyr'
The following objects are masked from 'package:stats':
filter, lag
The following objects are masked from 'package:base':
intersect, setdiff, setequal, union
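These messages simply report that dplyr's filter() and lag() now mask the versions in the stats package (and a few set operations from base). A masked original can still be called with an explicit namespace prefix; the two lines below are a small illustrative sketch and are not part of the original workflow.
# dplyr::filter() subsets rows of a data frame; stats::filter() applies linear filtering to a series
dplyr::filter(dropout_data, ParentEdu == 5)   # rows where parental education equals 5
stats::filter(1:10, rep(1/3, 3))              # a simple moving average over a numeric vector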
# Example data frame:
# df <- data.frame(a = 1:5, b = 6:10, c = 11:15)
# Remove column 'b':
# df <- df %>% select(-b)
# Remove FavoriteNumber
dropout_data_new <- dropout_data %>% select(-FavoriteNumber)
# View the first few rows of the result
head(dropout_data_new)
  SES ParentEdu Attendance HomeworkHours Motivation PeerSupport
1 37.32338 3 82.01397 4.8972188 8.3253876 7.0469820
2 58.92681 5 80.15557 2.0223786 5.5268894 7.7920209
3 46.85475 4 87.55442 2.3187275 6.6618207 8.3486654
4 58.24271 1 86.30704 3.5773818 5.7951508 6.3561422
5 60.17920 3 86.45678 3.2781019 7.9022356 11.7263730
6 58.43493 2 85.86751 1.7571708 8.8391747 9.3044500
  TestAnxiety ExtraCurricular MathSelfEfficacy Dropout
1 7.91338288 11 8.3972667 0
2 3.77536586 3 5.1780416 0
3 4.92243798 6 6.6849651 0
4 7.33046731 6 6.1152394 0
5 5.25460618 6 8.0659612 0
6 6.79555709 9 8.7530208 0
118 8.36358440 14 6.6902181 0
119 5.87249614 1 5.5280996 0
120 9.14129249 9 5.5806341 1
121 7.42474363 11 6.7538161 0
122 7.08590364 1 7.6567443 0
123 8.38952546 6 5.6000214 0
124 8.57790452 4 8.2445028 1
125 3.03326226 1 4.7568559 0
126 8.66118083 2 6.4042337 0
127 8.46931826 9 4.7449021 1
128 4.92411616 3 8.5348787 0
129 5.82020041 10 9.9251703 0
130 5.70917296 13 7.8662415 0
131 8.54779881 11 6.2921477 0
132 5.83731390 5 6.6829479 0
133 7.85790091 8 8.1658779 1
134 7.06427473 4 5.3349479 0
135 6.28584747 14 7.5005887 0
136 8.49260424 11 6.2478566 0
137 6.34040805 3 6.5676580 0
138 8.35134857 10 5.3418832 1
139 8.02333375 13 10.3857957 0
140 5.42719436 3 8.3031007 0
141 7.12345194 13 2.4602898 1
142 8.91261525 7 7.3280262 0
143 4.26268758 6 7.7095810 1
144 9.22597674 4 3.9431966 0
145 5.66955745 3 8.8156944 0
146 3.62831692 6 12.5566694 0
147 8.65155043 9 8.9549559 0
148 11.73492517 3 5.3729600 0
149 5.86066216 6 7.9529061 0
150 6.87969975 1 7.4718195 0
151 8.59922782 6 10.3770762 0
152 8.34849103 15 4.1437295 0
153 6.53190040 2 5.1995878 0
154 6.26542285 12 8.3395182 0
155 9.03166695 15 7.7449582 0
156 7.76843347 10 8.6865106 0
157 5.30465998 11 6.1699887 0
158 9.46329145 14 5.9790425 0
159 6.12524004 15 8.1231793 0
160 6.96795486 3 3.2138523 0
161 8.65214705 11 5.6513849 0
162 6.12052055 7 8.0014200 0
163 6.10449228 2 12.1517678 0
164 7.64773771 12 8.6791786 0
165 6.83492279 12 7.3217960 0
166 8.73520746 13 4.6128375 0
167 6.89388717 7 6.5143618 0
168 7.05522737 5 6.2675235 0
169 7.51797645 14 11.0448462 0
170 6.09405374 13 8.1233421 0
171 10.63292465 10 7.1000613 0
172 5.51206380 10 6.1490492 0
173 9.76955744 6 6.3484507 0
174 7.12643771 13 5.5621876 0
175 7.04986718 8 6.0082325 0
176 7.87004211 5 4.8051858 0
177 7.95408816 13 7.2485377 0
178 7.19132971 1 5.9734223 0
179 6.93652068 6 8.1899177 0
180 6.24501235 3 7.7681187 0
181 6.01129070 14 6.0243635 0
182 6.30277502 15 6.0842536 0
183 7.81087915 9 12.3195837 1
184 6.22125549 8 8.7154398 0
185 4.85447274 12 6.5527578 0
186 8.40233629 9 7.5278667 0
187 7.00973654 7 8.7697048 0
188 6.24020345 2 4.1597509 0
189 3.84361591 8 9.8499728 0
190 8.39915204 13 4.0307188 0
191 6.43321735 11 8.7126130 0
192 4.33606029 12 7.6498942 0
193 5.07896531 9 4.5606704 0
194 6.28292467 11 7.7415520 0
195 8.31940432 2 6.3766619 1
196 4.39595375 13 5.8459147 0
197 7.48589820 15 7.7621159 1
198 8.25935894 2 5.7003999 1
199 4.80960758 2 6.8776428 0
200 7.23714732 7 7.8662863 0
201 6.40828663 11 5.5052846 0
202 6.85353579 14 9.3777536 0
203 4.90967849 9 9.4036327 0
204 6.40399385 4 4.4573193 0
205 5.45589738 2 8.3981226 0
206 10.16723535 10 9.2757188 0
207 6.15631697 1 6.1578030 0
208 4.24120258 4 9.5797810 0
209 4.41410598 4 7.7039760 0
210 7.92982290 12 5.4837887 0
211 6.19808822 14 4.7868287 0
212 4.46724015 8 8.6061847 0
213 4.62003345 7 9.4342619 0
214 7.75397977 14 4.3700565 0
215 7.29699741 14 8.7362421 0
216 8.75961525 2 6.9574693 1
217 4.49121723 6 6.7291165 0
218 9.49634609 6 3.7926841 0
219 7.53711483 13 10.5559295 0
220 8.61222322 1 7.6151914 0
221 8.46187538 14 5.9767580 0
222 5.57655900 13 4.9208497 1
223 9.22367949 13 6.9998418 0
224 6.21696663 11 3.2105414 1
225 5.03225504 7 5.9594609 0
226 10.00151251 5 7.1601597 0
227 6.13726740 13 10.9448441 0
228 10.00533224 8 7.5491811 1
229 6.73774085 14 8.5401090 1
230 6.52304276 3 11.3070546 0
231 3.80358335 4 5.7440940 0
232 5.46796410 10 5.9943706 0
233 4.26382867 7 9.0583183 0
234 4.17329995 12 6.5012150 0
235 8.65603972 5 5.2130009 0
236 4.52396232 13 9.1544562 0
237 3.52311078 3 8.6103762 0
238 9.43199415 6 7.7620625 0
239 5.11341718 4 6.8362936 0
240 7.81523866 10 5.0495509 0
241 7.59576891 7 9.1028266 0
242 7.17782360 1 4.7466194 0
243 6.90523361 15 7.7859606 0
244 4.87062993 5 5.9110906 0
245 10.60991998 5 9.5233964 0
246 7.63572030 8 8.0699887 0
247 7.66983816 13 7.5084791 1
248 9.36792365 13 7.2794065 0
249 7.39080602 15 9.1672085 1
250 4.38453121 5 4.3125593 0
251 4.73545191 12 10.0733067 0
252 6.13991957 15 5.1351067 0
253 5.52339937 1 9.8927088 1
254 11.39930861 14 3.2351602 0
255 8.86505071 2 9.5434618 0
256 7.43348134 15 6.7835573 0
257 6.31869658 15 7.5726980 0
258 5.56756710 14 5.5793045 1
259 5.71936770 13 7.9456408 0
260 9.86922268 3 4.2943662 0
261 10.71314564 3 5.7854746 0
262 7.79170682 12 6.0532514 0
263 7.90855323 12 10.8491305 0
264 9.57138262 5 7.6350701 1
265 7.48772053 12 7.2590996 0
266 6.13763755 15 5.6789035 0
267 9.12812538 4 7.9278314 0
268 7.62006233 3 5.7605665 0
269 6.92084328 13 6.5901131 0
270 8.52692772 8 9.3761036 0
271 6.02105537 10 7.8562985 0
272 9.74715345 14 8.1398130 0
273 4.28583050 2 3.5868826 0
274 5.12535281 12 7.4386711 0
275 7.04244317 15 7.1173680 0
276 5.62691147 14 8.0060130 0
277 7.51347198 7 13.8950730 0
278 6.75909113 2 4.3574190 0
279 6.81830695 12 7.2970141 0
280 8.49514037 14 8.3787228 0
281 5.53936518 7 7.1573967 0
282 6.32855564 6 5.7787642 1
283 4.63174239 12 7.4511241 0
284 6.02694042 10 6.9599203 0
285 7.72472791 12 4.3121927 0
286 8.29198458 2 8.7372651 1
287 6.19097023 12 7.0689001 0
288 6.07641835 6 11.8466860 0
289 7.52292462 10 4.5235331 0
290 8.10835879 2 5.5392402 0
291 8.41777490 7 10.6451242 1
292 7.78805602 11 7.1203820 0
293 5.17914764 3 6.0438583 0
294 6.59885918 15 6.0681588 0
295 8.77916595 4 7.3530980 0
296 6.27077828 12 10.7343875 0
297 8.31701094 14 8.7550255 0
298 6.52085902 2 4.4765576 0
299 7.39581145 7 5.0594925 0
300 4.67058612 8 8.8844535 1
301 5.62262272 8 5.3198519 0
302 7.90786032 4 5.4945553 0
303 6.69691644 12 8.8273466 0
304 6.37738052 12 9.9169787 0
305 5.32683834 12 6.7986646 0
306 7.68435762 12 7.4726438 0
307 8.38842632 11 7.6482221 1
308 9.86822360 9 8.6588929 0
309 3.94463567 1 7.9989445 0
310 8.47162388 13 5.9089396 0
311 8.43990815 4 5.6424762 0
312 4.57458947 6 4.1101501 0
313 7.90143398 11 4.8579729 0
314 7.74970084 1 8.0354374 0
315 4.99568255 13 7.6900509 1
316 11.69118521 2 5.2652885 0
317 4.39648864 7 8.4912541 0
318 4.89717570 14 7.1693639 0
319 6.42300929 15 7.2871836 0
320 4.64736844 6 7.9538619 0
321 5.90044432 5 6.0725356 0
322 8.19218174 4 7.8255144 0
323 8.29628148 13 6.8804452 0
324 3.50541793 2 7.3841856 1
325 1.41730828 4 6.8114995 0
326 2.30996172 7 3.4688289 0
327 10.20403528 6 10.2209711 0
328 4.47014829 1 4.7286972 1
329 3.16656933 14 8.9594456 0
330 4.44181940 5 7.1680911 1
331 8.52652344 8 7.0097520 0
332 5.31407218 2 10.3226323 1
333 9.93279549 11 10.0162852 1
334 5.79005635 6 7.2166122 0
335 4.23432670 2 5.6168613 0
336 7.07341431 11 4.7314400 0
337 5.59569340 10 2.9238426 0
338 4.87984265 12 10.1366082 0
339 8.47319026 4 6.2914693 0
340 9.26984713 4 6.8496064 1
341 6.05120715 3 9.3018245 0
342 6.97377903 3 9.0550548 0
343 5.31863644 7 7.2580094 0
344 4.68772672 11 5.1742339 0
345 9.05056102 3 6.5210746 0
346 6.67868190 2 5.9865257 0
347 6.48205714 5 5.2059866 0
348 8.39465174 4 7.0920590 1
349 3.48857355 5 7.6851722 0
350 9.21982439 15 6.4146841 0
351 11.55206577 13 4.3560886 0
352 6.90170572 12 10.5536774 0
353 5.81393253 5 6.9957450 0
354 7.57189443 5 6.1600693 0
355 8.44103347 9 6.8581479 0
356 5.92346785 11 7.9674007 0
357 8.53154702 4 7.9900669 0
358 8.76487995 11 9.2588639 0
359 6.90687938 6 6.8995346 1
360 8.41007471 4 5.8349758 0
361 5.07008306 13 6.4972106 0
362 4.92054483 11 9.3150712 0
363 7.13826547 14 6.0650911 0
364 6.14369104 14 7.2250443 0
365 6.14958949 5 7.8072008 0
366 5.84601265 6 7.4926006 0
367 8.04455752 9 8.4282374 0
368 8.83327798 4 12.0044409 0
369 9.57511534 9 8.3732988 0
370 5.79498358 3 6.1932093 0
371 2.33793013 9 6.3813981 0
372 6.97802739 8 8.0960074 0
373 8.16037936 9 6.8076998 1
374 8.39685872 2 9.1751180 0
375 2.58341585 3 7.7723123 0
376 8.17524721 14 6.6322048 1
377 4.10990421 14 13.2422747 0
378 6.84039687 10 6.2096082 0
379 11.53632756 1 11.2009789 0
380 8.46631675 10 3.9443356 0
381 12.48589411 3 6.3721736 1
382 8.91617162 2 4.7911210 0
383 7.31048507 15 6.9079381 0
384 8.16731433 2 6.3549685 0
385 10.91089691 3 7.3632300 0
386 8.22389987 11 6.7861862 0
387 3.97586435 12 9.0496731 0
388 5.87700510 11 8.9150975 0
389 6.40728195 12 5.0935682 1
390 6.21156319 14 10.3672542 0
391 10.75166202 9 7.2702370 0
392 9.01299397 14 6.7676693 0
393 11.43006654 11 4.9975164 0
394 7.36610315 9 10.6191293 1
395 11.58136578 8 6.0478453 0
396 10.45894470 9 6.2907509 0
397 7.51386171 2 8.5522229 0
398 7.74898300 1 8.7659891 0
399 6.47475621 2 8.0020124 0
400 9.55939451 13 7.0440488 0
401 4.40265396 11 5.6991952 0
402 8.00404807 8 8.2628797 0
403 7.77552297 3 5.2905836 0
404 7.03485373 3 4.0908014 0
405 6.08597003 11 9.8308334 0
406 8.15585042 5 10.2305066 1
407 7.84403994 1 5.5231204 1
408 7.13162053 2 12.2279691 1
409 7.91156542 8 4.1386463 0
410 4.69292936 10 10.7659743 0
411 6.66560265 4 7.0770675 0
412 7.55694105 6 10.1816112 0
413 9.53312220 11 7.0945838 0
414 8.46351012 6 7.6535403 0
415 7.15578417 10 5.6575205 0
416 6.99496619 11 9.9691903 0
417 6.25551760 5 4.0915657 1
418 9.18832908 5 5.5310682 0
419 8.73285407 4 6.9267533 0
420 7.57360424 7 6.5028272 0
421 7.13270231 14 7.5920129 0
422 7.30902579 14 7.9920954 1
423 2.54049480 1 9.3000380 0
424 4.08066177 2 7.6409895 0
425 5.25703778 12 9.4310445 0
426 7.35996042 7 4.8844028 0
427 6.59793948 6 6.6585304 0
428 10.31763443 6 8.2606916 0
429 8.66779437 10 7.2511533 0
430 6.54561339 11 3.3891820 0
431 7.03158906 10 4.8484402 1
432 10.44410391 12 4.9712273 0
433 7.25942927 9 8.2150010 0
434 9.04909782 5 6.7064666 0
435 7.25846853 13 7.7173315 0
436 8.31498631 15 10.2256343 0
437 9.46396411 5 4.3700777 0
438 3.10237472 4 7.6908020 0
439 9.84705979 12 7.6813773 0
440 7.56145278 13 7.9274282 0
441 5.84821105 10 7.3177955 0
442 9.96501037 13 7.2230751 0
443 8.60047899 9 5.2862389 0
444 8.11422992 11 11.5558407 1
445 10.63081275 1 9.4082034 0
446 7.43947229 12 8.3720025 0
447 5.61874343 14 7.7539922 0
448 6.95174544 3 4.1855638 0
449 5.39842931 15 5.0086093 0
450 5.68141962 11 8.0611367 0
451 7.97294254 6 10.0505545 0
452 5.70509629 6 8.5688487 0
453 5.01677549 4 4.4826911 0
454 4.93695254 8 8.5513163 0
455 3.31566697 14 4.6944092 0
456 6.56888326 11 5.6616286 0
457 9.58216517 8 4.3790721 0
458 7.28101408 3 5.9707935 0
459 5.54006128 9 6.2779853 0
460 7.85396163 14 2.4648502 0
461 8.65983746 15 9.8448673 0
462 4.87812490 12 5.6049200 0
463 11.55879601 3 3.6736990 0
464 10.94800518 8 1.1935498 0
465 5.83674365 3 2.9740609 0
466 7.22366760 4 7.3989599 0
467 8.23733670 13 8.1118312 0
468 9.56676463 9 7.7078079 1
469 6.71302807 4 10.4076628 0
470 7.28002736 2 6.2144909 0
471 6.90671789 14 5.9371749 0
472 8.66475582 9 4.9644291 0
473 6.55359957 14 7.0634739 0
474 7.15450125 11 6.1051357 0
475 8.86869791 13 9.8412895 0
476 12.17394515 14 4.8809460 0
477 5.52751873 15 5.0581826 0
478 4.41725089 12 5.9030860 0
479 6.02577828 9 8.7728142 0
480 6.54944912 6 6.3031453 0
481 5.14483840 14 8.0711698 0
482 8.22008850 4 9.9613423 0
483 6.35631908 6 7.8956667 0
484 11.30227926 8 6.4468701 0
485 1.72919508 10 7.1570639 0
486 0.03323368 6 7.7444937 0
487 9.73138938 8 6.7037031 0
488 8.08243453 9 8.8515666 0
489 4.66774066 7 4.0532639 0
490 5.44818015 3 8.2673245 0
491 6.40979464 2 5.8979021 0
492 9.44152170 15 9.2560937 0
493 10.81163813 5 10.6175318 0
494 8.11733508 14 7.8776306 0
495 8.47935234 7 4.4268723 0
496 4.64978464 9 5.5407271 0
497 5.85773390 3 7.5786717 0
498 3.30535807 6 9.1409422 0
499 9.89531654 5 5.3498498 0
500 7.25933995 15 3.4985255 0
501 7.28479693 4 2.2696005 0
502 5.14230717 8 9.2240915 1
503 7.04696255 6 8.1600806 0
504 8.24615729 13 6.1795657 0
505 6.91261142 14 9.6689450 0
506 4.42384034 9 9.8386721 0
507 6.35261567 11 9.0896165 0
508 3.78321544 11 10.6205767 0
509 8.90554503 10 7.7427289 0
510 7.63265672 14 4.3175684 0
511 8.02079932 7 7.5341845 0
512 6.15775286 10 5.8578686 0
513 7.32595659 8 5.3935316 0
514 6.29105373 14 11.3175258 0
515 8.08200721 15 8.9896345 0
516 8.77615165 4 8.6898657 0
517 7.52206594 13 11.2729236 0
518 6.92411282 3 4.9788233 0
519 2.93615174 6 5.0189972 0
520 9.48421565 8 7.5373424 0
521 7.16350670 10 8.4415108 0
522 6.79561615 6 9.9127534 0
523 6.89475365 12 4.1379867 0
524 6.54270703 9 10.8294004 0
525 7.72236134 5 9.2675968 0
526 9.12622950 8 5.5508596 0
527 11.58248479 13 6.0093652 0
528 8.50566505 7 7.0196531 0
529 7.71889610 5 2.6329295 0
530 6.61551744 14 6.1224023 0
531 4.80842143 4 5.1750880 0
532 7.62846672 8 6.1040908 0
533 5.75613750 13 7.4674190 0
534 3.65967179 3 6.0253107 0
535 9.91316917 2 7.9731852 0
536 6.90200922 10 9.0691618 0
537 8.35538617 10 5.1618579 0
538 9.93697510 9 10.8785406 0
539 4.82348338 14 7.3954440 0
540 7.21658913 11 6.8967211 0
541 8.24209160 1 1.6696358 1
542 4.92425201 3 5.3000787 0
543 6.37719556 7 10.2334561 0
544 6.83011632 8 4.7337767 0
545 8.78545574 3 7.7241434 1
546 8.11937441 3 4.6116904 1
547 6.78195420 11 6.0837286 0
548 8.78266609 9 7.1377959 0
549 5.26654309 5 6.3021183 0
550 4.32789456 1 7.8711366 1
551 4.30632900 7 7.0248730 0
552 5.70838830 15 6.6409473 0
553 3.98364832 2 9.4505925 0
554 5.38961944 9 7.7795224 0
555 6.82110185 1 9.2887118 0
556 4.94902776 15 5.3696294 0
557 10.28731311 15 5.9140813 0
558 8.32275061 12 4.1241929 0
559 8.38931904 2 6.6798957 0
560 7.17890492 3 8.2014593 0
561 5.95320768 4 6.8340239 0
562 11.61819713 2 8.5643238 1
563 8.46730226 4 7.1079889 0
564 6.27609218 9 4.6466690 0
565 10.36550866 14 11.7918321 0
566 5.47910458 4 7.0601529 0
567 7.50797313 1 6.3802820 0
568 6.90959669 10 8.3278267 0
569 7.11297186 5 7.9383820 0
570 6.85018674 15 5.5424038 0
571 7.41117071 3 2.2038372 0
572 5.85073756 10 7.9654713 0
573 9.01040177 14 5.0206684 0
574 6.30941230 4 8.3672992 0
575 11.61941351 12 5.6569242 0
576 9.07505086 5 8.8368212 0
577 4.87234459 1 5.8420379 1
578 6.34480412 12 4.4567235 0
579 5.23363001 8 0.2963934 1
580 4.28804333 12 7.4344514 0
581 8.51353220 14 5.9210841 0
582 9.10729695 14 7.0510964 0
583 7.39327616 11 7.6625020 0
584 6.01524672 11 5.2743130 0
585 5.57156758 3 8.1202228 0
586 7.22582951 11 5.6205835 0
587 5.17662929 4 8.1109115 0
588 8.05230127 1 9.0879774 1
589 8.47042453 8 9.8508072 1
590 9.08932868 4 7.7695253 0
591 7.74532569 15 5.5147576 1
592 8.56459240 2 10.5730068 0
593 6.43059332 15 7.7169118 0
594 6.47248959 7 3.8431135 0
595 6.34605466 10 8.0185926 0
596 9.89356142 12 8.0362014 0
597 4.04699932 8 3.7333179 0
598 4.94879383 11 8.9865511 0
599 8.22346284 15 5.1021001 0
600 8.09997028 9 2.8380824 0
601 7.59385143 6 8.5204916 0
602 6.58102898 3 14.8006988 0
603 8.24264549 1 2.5167767 0
604 6.15458156 12 2.8211948 0
605 6.55825502 13 10.6845594 0
606 2.87962221 13 5.9332924 0
607 3.21946554 1 14.3075041 0
608 6.05502877 1 6.9543089 0
609 7.15237796 13 6.9764804 0
610 3.87535651 1 5.3669750 0
611 11.02513639 1 9.1971786 1
612 5.09918739 11 5.9346064 0
613 1.24919328 1 9.0018434 0
614 5.67819044 7 3.7174206 0
615 6.80687680 12 7.9118230 0
616 5.24746402 11 5.4765779 1
617 6.76499620 10 5.1309281 0
618 4.83499531 14 5.0451334 0
619 9.43634047 7 9.7227231 0
620 4.98168325 6 5.2036697 0
621 5.41639186 11 8.9413362 0
622 8.45784533 10 5.7299689 0
623 7.31162039 14 3.7354984 0
624 8.25568943 14 6.9992132 0
625 9.21355166 13 5.1255925 0
626 5.43440474 13 7.6460718 0
627 6.91921700 7 8.2333334 0
628 8.83498466 2 7.3153615 0
629 7.74773647 1 9.4311020 0
630 4.30180335 5 5.2711031 0
631 4.64003178 11 7.5923053 1
632 4.22896751 8 6.0208637 1
633 6.46683008 2 4.6520500 0
634 7.13160807 15 3.2266244 0
635 7.80970402 12 6.4375482 0
636 9.56166326 3 5.4137879 0
637 7.69574799 6 3.0079559 0
638 8.71964137 6 7.2521115 0
639 7.08366622 2 7.7417302 0
640 10.70993506 8 8.5229540 1
641 6.03473763 3 10.2409045 0
642 6.43648712 9 4.8069110 0
643 3.92505814 15 5.5172550 0
644 8.80223241 13 9.8173302 0
645 9.39926818 6 9.6173824 0
646 9.23320644 1 5.3750928 0
647 6.38424800 12 5.9618782 0
648 7.65269000 12 7.0285000 0
649 9.72548804 13 6.7657367 0
650 6.14736727 7 6.3951256 1
651 5.22499272 6 7.5049241 0
652 7.82103930 7 8.6806593 0
653 4.96936479 12 7.9646948 0
654 5.24798820 14 11.7764647 0
655 7.70234608 8 7.6502960 0
656 7.75305298 4 7.0490278 0
657 8.15118469 6 7.5210154 1
658 8.56086533 1 6.6638502 0
659 4.68560174 13 9.6370337 0
660 8.34407532 9 6.7301633 0
661 5.80779172 12 8.2622348 0
662 1.34404511 15 9.3942820 0
663 8.10062095 12 5.2851061 0
664 6.21955790 5 6.0538386 0
665 3.95940626 12 6.6080366 0
666 7.47955598 12 7.6605588 0
667 8.36284542 3 6.8353520 1
668 7.59607707 4 7.3916666 0
669 8.45562858 8 5.7404343 0
670 7.05302893 8 4.7635605 0
671 1.91978092 13 3.6801795 0
672 6.38327681 3 9.0271447 1
673 5.58078138 9 9.0843328 0
674 10.32194609 2 5.6687634 0
675 7.89920322 15 8.9067793 0
676 6.77904872 10 7.4597649 0
677 9.89251102 7 7.0044120 0
678 6.24657190 9 7.4966554 0
679 7.42326218 10 7.8621236 0
680 8.02098699 8 7.6753357 0
681 6.90703227 11 7.5057104 0
682 5.82433120 15 10.8537190 0
683 11.86533996 11 9.7178921 0
684 8.56218308 1 4.1725125 0
685 7.79574764 3 7.2207316 0
686 7.14253818 13 7.2837490 0
687 6.89644934 10 5.0704901 0
688 11.16561792 10 5.9251056 1
689 10.38573822 9 6.9545098 0
690 8.22725055 4 5.0358534 0
691 5.84032846 7 4.9357021 1
692 10.55522635 3 6.8364315 0
693 7.72146133 8 7.7519269 0
694 8.03081894 1 6.2276403 0
695 5.15566099 15 3.3326063 0
696 11.29075105 11 9.5185079 0
697 5.59490474 13 7.7399914 0
698 7.30904699 1 8.7479138 0
699 9.37989384 3 5.5018772 0
700 5.55863399 9 10.7165738 0
701 4.31528850 14 4.5954909 0
702 9.66401032 12 6.4688975 0
703 5.31259747 5 9.0478785 0
704 7.01498111 8 6.3510010 0
705 8.50693032 8 9.4742918 0
706 8.51748094 8 7.5181958 0
707 6.39355258 14 5.3949295 0
708 7.63631077 4 8.9280374 0
709 5.81456847 13 6.0900729 0
710 9.15444028 11 8.8621219 0
711 4.71226202 14 10.4127579 0
712 8.26385296 15 6.1186476 0
713 7.61254050 1 9.6030160 0
714 9.49637348 1 6.5301937 0
715 8.16981507 3 9.4715170 0
716 6.27073898 8 4.5814747 0
717 3.27624616 1 6.7415042 0
718 5.82298152 2 12.3093625 0
719 6.62542541 13 5.3047999 0
720 7.08883964 2 4.9086653 0
721 7.24152894 4 5.3006048 0
722 7.18506588 12 4.3418397 0
723 7.09318541 10 2.8438499 0
724 9.47602528 4 2.7118207 0
725 7.31419597 15 5.2949809 0
726 6.52766397 1 10.3846308 0
727 8.28915026 5 8.7547994 0
728 6.11166736 13 5.3703393 0
729 9.86224036 10 7.8405917 0
730 8.07267221 7 7.6803124 0
731 7.68698643 10 6.8784852 0
732 5.46948650 12 4.0373652 0
733 5.51743491 10 5.1054828 0
734 8.84663010 4 8.8829147 0
735 8.24975564 3 7.2593506 1
736 5.70599507 14 9.0714357 0
737 12.18358773 10 6.3742681 0
738 9.32187120 3 9.4522819 0
739 11.00327023 6 6.9622606 0
740 10.18231134 15 11.4493678 0
741 10.35879093 6 9.5750702 0
742 9.06334034 15 4.5447343 0
743 8.66263854 9 6.9573853 1
744 7.30390589 11 4.9391019 0
745 8.73514127 4 11.8784748 0
746 10.33452514 12 8.9571463 1
747 6.99316252 5 5.0897741 0
748 7.29623779 6 3.3216048 0
749 6.84828180 15 6.3127800 0
750 7.00422460 5 8.2839152 0
751 8.08590180 3 2.3048738 0
752 8.30343006 4 5.8300879 0
753 4.52596408 7 5.8410177 0
754 6.37384870 8 6.1999410 0
755 5.78800542 12 10.0064287 0
756 6.35502859 6 6.8695869 0
757 6.96799274 11 7.3869152 0
758 9.46295810 5 7.6493051 0
759 7.17045248 13 11.6918056 0
760 9.08750683 13 8.6111736 0
761 4.80959381 12 5.6311641 0
762 7.10050384 7 7.1512068 1
763 7.02855755 1 6.2619683 0
764 7.62927682 9 6.1244462 0
765 7.87895979 1 6.1206549 0
766 4.78806492 5 7.7831494 0
767 5.32053725 14 4.4645930 0
768 6.29002562 12 9.9010575 0
769 7.31843129 5 6.7128831 0
770 9.07812283 13 6.1510583 0
771 4.36052202 3 9.3353680 0
772 8.86107115 13 6.7480352 1
773 8.50225481 14 6.7190570 0
774 7.21247736 9 7.0165174 0
775 9.80307955 1 6.4413171 0
776 7.41106298 10 9.5642620 0
777 6.90571053 14 7.6899048 0
778 7.66603466 3 5.0322828 0
779 7.86736817 12 4.9470897 0
780 9.77441212 8 9.0046610 0
781 5.09758344 13 9.0302590 0
782 6.77729625 4 8.8842512 0
783 8.24436893 11 8.5489468 0
784 4.67824303 15 7.0120843 0
785 3.53564557 5 7.2064588 0
786 6.10231463 2 4.1097441 0
787 6.69689169 13 5.6397694 0
788 6.97488222 3 5.4088488 1
789 9.13716840 4 8.4528001 0
790 5.68278363 4 8.3758136 0
791 5.20340151 7 10.0004249 0
792 2.62364615 9 7.8596399 0
793 10.43591657 9 7.1684867 0
794 7.61630958 8 8.4913438 0
795 8.31786290 9 10.1418603 0
796 8.39209355 3 6.3923881 0
797 3.59666298 3 6.4588087 0
798 6.05805843 8 5.5601066 1
799 9.63962209 6 9.3863150 0
800 10.03508352 4 7.0203144 1
801 8.88547844 7 7.5575176 0
802 6.54437128 8 6.0844610 0
803 6.17826038 8 7.2574481 0
804 6.99952683 7 9.3357316 0
805 6.74363383 4 4.9504612 0
806 6.87804456 1 10.6364849 0
807 5.47658079 4 4.5931479 1
808 2.42297742 7 4.9804235 0
809 4.69318685 4 8.5922230 0
810 7.72794879 9 8.8950564 0
811 5.05748989 11 5.9176210 0
812 13.77218568 12 6.6335219 0
813 8.41958367 8 5.6199814 0
814 8.53258864 14 9.3267234 1
815 5.62703323 6 5.4386989 0
816 9.13127778 14 5.4712085 0
817 6.01593120 7 8.5892651 0
818 7.50138251 8 3.6427914 0
819 6.39959368 12 7.0742717 0
820 6.12880933 7 6.1868147 0
821 6.74507697 2 8.0360602 1
822 11.42296620 8 5.4586150 0
823 8.69948255 13 8.0608086 0
824 8.01308402 13 10.3134643 0
825 7.73730118 15 7.4770088 0
826 6.99878277 3 8.3996879 0
827 11.06613002 6 4.0559478 0
828 7.91084673 10 8.7776905 0
829 6.51160396 2 9.0437706 0
830 7.04936396 2 8.5029676 1
831 4.78092254 5 6.5333955 0
832 5.25571295 10 5.5815118 1
833 5.96186022 9 6.1872884 0
834 6.98659250 3 5.5706815 1
835 9.47872991 4 8.3891147 0
836 9.04018652 8 4.4326306 0
837 7.08180136 2 7.8663687 0
838 5.51418763 14 5.6818695 0
839 7.63057693 1 5.6285598 0
840 7.60317064 6 7.4971318 0
841 9.71093288 9 4.3992459 0
842 5.96892568 13 6.7612045 0
843 4.09208127 12 4.9950366 0
844 6.94807611 6 9.0214478 0
845 5.71329215 11 6.4391059 0
846 6.87226490 5 8.1679609 0
847 5.87422261 14 11.5106570 0
848 9.39077501 3 9.5787688 1
849 11.63051878 9 8.4915949 0
850 9.33048483 3 7.7401615 0
851 6.82686600 15 6.3737534 0
852 5.55997005 4 6.0349505 0
853 10.96316187 14 7.9885641 0
854 3.86124238 10 4.9795246 0
855 10.34289525 2 11.1024463 0
856 7.40425888 4 6.7403062 0
857 6.99596676 1 4.8244555 0
858 6.11278339 8 5.0007785 0
859 8.23492412 4 8.4948753 0
860 3.85009994 10 6.2745954 0
861 7.42963242 14 3.1056591 0
862 8.24968596 1 8.2338722 0
863 11.69752058 2 7.9047406 0
864 5.44577242 7 5.3243324 0
865 5.65007936 1 2.7222140 0
866 6.22815540 15 3.3637297 0
867 7.01588857 10 7.7225407 0
868 2.96960415 8 11.0827446 0
869 3.64044310 1 6.1630477 0
870 8.33585309 5 6.9750627 0
871 6.40498910 3 8.1664992 0
872 9.37399537 7 10.1086529 0
873 7.68109757 14 2.6587841 0
874 6.14415246 12 5.1751420 0
875 6.63440758 10 7.7487725 0
876 7.17993757 11 2.8488472 0
877 6.35444959 4 5.8100608 1
878 6.41748957 2 7.7531541 0
879 8.54901418 6 5.0452267 0
880 8.92855394 5 7.1600617 0
881 8.96315913 7 10.1642480 0
882 5.70629620 4 8.7518388 0
883 9.53497670 6 7.5774787 0
884 6.72816243 2 9.6938492 0
885 5.05082037 9 7.1119020 1
886 8.25799346 8 8.3833051 0
887 8.87213138 12 4.7476183 0
888 9.22980611 11 4.5450253 0
889 5.89710064 3 6.7050647 0
890 5.58706885 11 4.8800819 0
891 9.95427261 4 7.0572455 1
892 9.10126713 9 3.9583646 0
893 7.27834188 8 8.7901062 0
894 7.91823507 7 4.8307203 0
895 5.51592333 5 6.5357555 0
896 5.59703227 2 6.2292575 0
897 9.69353969 9 10.9830157 0
898 9.16183559 12 9.0986534 0
899 7.27647925 8 6.6941889 0
900 11.71903008 12 8.5119714 0
901 11.37301280 14 8.8008508 0
902 8.96514318 4 5.0600168 1
903 5.93086708 15 8.0392195 0
904 6.60273077 15 9.0730709 0
905 6.66859180 11 4.5497459 1
906 8.77214323 6 4.9120284 0
907 5.40908207 14 6.4759576 0
908 5.37695528 1 6.0434770 0
909 7.52567255 5 6.7089510 0
910 6.75808324 8 8.9154587 0
911 9.48636542 5 6.1421251 0
912 7.84513264 3 7.1681764 0
913 8.46483794 3 10.0656899 0
914 5.15382206 8 9.2346435 0
915 5.01726674 15 5.8164282 0
916 5.04045571 6 6.5336413 0
917 7.19910360 5 8.2555371 0
918 6.44184247 3 5.3927453 0
919 12.67715688 1 8.5190910 0
920 4.78953650 2 4.1475499 0
921 5.64912939 10 9.4310212 0
922 2.18301311 4 7.2860177 0
923 6.48442071 8 8.0457831 0
924 7.07830767 9 6.3672810 0
925 5.17511628 12 6.7417660 0
926 5.14557882 6 7.1904634 0
927 10.35627026 5 9.6663003 0
928 6.10186595 14 6.3679857 0
929 6.56637533 15 8.4360394 0
930 8.80011065 14 4.5355983 0
931 3.44682038 11 9.2047202 0
932 4.49209225 11 5.5503604 0
933 7.52463008 6 9.9821209 0
934 6.17532457 5 7.6925253 0
935 7.47232413 5 3.3106046 0
936 9.07777665 13 5.6808335 0
937 10.66938592 2 9.2308705 0
938 6.77493691 15 2.8383142 0
939 8.26520036 11 4.6182888 0
940 6.48678842 11 8.7783642 0
941 5.89604463 1 6.5230287 0
942 8.19140614 8 3.4117302 0
943 8.10429519 11 10.4391025 0
944 10.65266366 6 7.1759631 0
945 9.18853699 12 9.5390855 0
946 9.05908666 6 6.6109226 0
947 9.67636375 14 6.3328061 0
948 7.15098173 1 5.2211738 0
949 9.23319923 13 9.0061618 0
950 8.46383160 8 6.4091989 0
951 6.22377187 13 4.0782851 0
952 7.04187296 5 3.1517804 0
953 10.04179627 9 9.5793555 0
954 4.46179120 4 6.2718563 1
955 5.47598866 2 10.3787692 0
956 5.79493619 1 3.1351805 0
957 6.00804646 10 6.2088713 0
958 4.33590865 7 5.4279465 0
959 6.01692175 1 7.2429580 1
960 7.05079481 7 9.2110865 0
961 6.35961354 7 9.6164193 0
962 9.84425660 7 7.8987694 0
963 8.53417419 11 6.4456385 0
964 7.90404872 11 10.0605159 0
965 7.25568916 5 7.5455813 1
966 7.88228245 4 10.6712719 0
967 4.35763484 5 7.6365686 0
968 8.38831459 10 9.6262420 0
969 7.10020409 8 5.9565316 0
970 7.99547059 9 6.6000136 0
971 7.73398512 2 10.9461425 0
972 8.05439768 6 8.3139269 0
973 6.33618076 9 7.5026714 0
974 8.60245885 12 6.5798378 0
975 5.89716813 15 9.4079148 0
976 8.27970095 7 5.0939625 0
977 4.17642782 11 7.5896588 0
978 9.63435558 6 4.7359940 0
979 9.37628384 15 4.4261299 0
980 4.93306339 3 8.0556718 0
981 5.39413809 6 6.4164723 0
982 5.15028197 7 7.3709867 0
983 6.21762263 4 4.9069408 0
984 7.55328460 2 5.6613804 0
985 8.57186739 3 9.6540860 0
986 7.48639183 13 10.1941338 0
987 5.06472229 4 8.4962835 0
988 8.92408668 15 4.6705280 0
989 8.39064197 7 6.3984964 0
990 8.46601164 13 8.8285250 0
991 4.52927384 12 3.6792858 0
992 8.11495887 8 10.3784899 0
993 10.71226124 14 5.9683000 0
994 7.24380902 7 9.7606527 0
995 6.46908889 9 6.3540172 0
996 9.00445751 3 9.1219823 0
997 7.90712600 1 8.3223347 0
998 5.59374259 14 5.6034202 0
999 7.62303860 5 9.4213477 0
1000 7.35095551 4 5.3921849 0
★ Convert DV to a Factor
# Convert Dropout to a Factor
dropout_data_new$ Dropout <- as.factor (dropout_data_new$ Dropout)
# Provide a description of what the values 0 and 1 represent.
attr (dropout_data_new$ Dropout, "label" ) <- "Dropout (1=Dropout; 0=NotDropout)"
# Check the structure of variables in this dataset
str (dropout_data_new)
'data.frame': 1000 obs. of 10 variables:
$ SES : num 37.3 58.9 46.9 58.2 60.2 ...
..- attr(*, "label")= chr "Socioeconomic Status (1-100)"
$ ParentEdu : int 3 5 4 1 3 2 4 2 1 2 ...
..- attr(*, "label")= chr "Parental Education Level (1-5)"
$ Attendance : num 82 80.2 87.6 86.3 86.5 ...
..- attr(*, "label")= chr "Attendance Rate (percent)"
$ HomeworkHours : num 4.9 2.02 2.32 3.58 3.28 ...
..- attr(*, "label")= chr "Hours Spent on Homework (per week)"
$ Motivation : num 8.33 5.53 6.66 5.8 7.9 ...
..- attr(*, "label")= chr "Student Motivation (1-15)"
$ PeerSupport : num 7.05 7.79 8.35 6.36 11.73 ...
..- attr(*, "label")= chr "Peer Support (0-15)"
$ TestAnxiety : num 7.91 3.78 4.92 7.33 5.25 ...
..- attr(*, "label")= chr "Test Anxiety (0-15)"
$ ExtraCurricular : int 11 3 6 6 6 9 10 12 13 7 ...
..- attr(*, "label")= chr "Extracurricular Activities (number)"
$ MathSelfEfficacy: num 8.4 5.18 6.68 6.12 8.07 ...
..- attr(*, "label")= chr "Math Self-Efficacy (0-15)"
$ Dropout : Factor w/ 2 levels "0","1": 1 1 1 1 1 1 1 1 1 1 ...
..- attr(*, "label")= chr "Dropout (1=Dropout; 0=NotDropout)"
# View Levels: In R, the first level of a factor outcome is treated as the reference category when fitting classification models (e.g., logistic regression). Here "NotDropout" (0) is the reference, so models estimate the likelihood of being a dropout (1) relative to not being a dropout (0).
levels (dropout_data_new$ Dropout)
★ When calculating performance measures such as the Receiver Operating Characteristic (ROC) curve and the Area Under the Curve (AUC), caret requires factor levels to be valid R variable names. Numeric labels like “0” and “1” do not qualify, even when stored as a factor, so we rename them with clearer labels: “NotDropout” and “Dropout.”
levels (dropout_data_new$ Dropout) <- c ("NotDropout" , "Dropout" )
levels (dropout_data_new$ Dropout)
[1] "NotDropout" "Dropout"
This renames the levels of the factor Dropout: the first level (“0”) becomes “NotDropout” and the second level (“1”) becomes “Dropout.”
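If you ever want to set or double-check the reference level explicitly, relevel() can be used. The sketch below is optional (the object name dropout_ref_check is only illustrative), since “NotDropout” is already the first level here.
# Optional sketch: make the reference (first) level explicit
dropout_ref_check <- relevel (dropout_data_new$ Dropout, ref = "NotDropout" )
levels (dropout_ref_check) # "NotDropout" "Dropout" -- the first level is the reference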
★ Normalization / Standardization
In K-Nearest Neighbors (KNN), normalization or standardization is important because KNN is a distance-based algorithm . It calculates the distance between data points to determine which neighbors are closest, and those distances heavily influence the model’s predictions. Normalization or Standardization in KNN ensures that all features (predictors) contribute equally to the distance calculations, preventing features with larger scales from dominating the model’s predictions, and ultimately leading to more accurate results.
We can use the preProcess() function from the caret package in R to perform both normalization and standardization .
method = “range” is used for normalization (Min-Max scaling).
method = c(“center”, “scale”) is used for standardization (mean = 0, standard deviation = 1).
For this demonstration, we “normalized” all the predictors.
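To see concretely what method = “range” does, here is a quick manual check on a single predictor (illustrative only; the preProcess() call below applies the same transformation to all predictors at once).
# Manual min-max scaling of one predictor: (x - min) / (max - min)
x <- dropout_data_new$ SES
x_scaled <- (x - min (x)) / (max (x) - min (x))
range (x_scaled) # should be 0 and 1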
# Load the caret package
library (caret)
Loading required package: ggplot2
Loading required package: lattice
# Normalization using preProcess() in caret
normalizationknn <- preProcess (dropout_data_new[, - c (10 )], method = "range" ) # The -c(10) tells R to exclude column 10, Dropout.
# Apply the transformation
normalized_predictorsknn <- predict (normalizationknn, dropout_data_new[, - c (10 )])
# Combine the normalized predictors with the rest of the data (untransformed column: Dropout)
dropout_data_normalizedknn <- dropout_data_new #Recreate new data
dropout_data_normalizedknn[, 1 : 9 ]<- normalized_predictorsknn
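For reference, standardization would only change the method argument. A minimal sketch is shown below; the object names (standardizationknn, dropout_data_standardizedknn) are illustrative and not used elsewhere in this document.
# Standardization (mean = 0, SD = 1) using preProcess() in caret -- illustrative only
standardizationknn <- preProcess (dropout_data_new[, - c (10 )], method = c ("center" , "scale" ))
standardized_predictorsknn <- predict (standardizationknn, dropout_data_new[, - c (10 )])
dropout_data_standardizedknn <- dropout_data_new
dropout_data_standardizedknn[, 1 : 9 ] <- standardized_predictorsknn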
Normalize data for Naive Bayes Evaluation
library (caret)
# Normalization using preProcess() in caret
normalizationnb <- preProcess (dropout_data_new[, - c (10 )], method = "range" ) # The -c(10) tells R to exclude column 10, Dropout.
# Apply the transformation
normalized_predictorsnb <- predict (normalizationnb, dropout_data_new[, - c (10 )])
# Combine the normalized predictors with the rest of the data (untransformed column: Dropout)
dropout_data_normalizednb <- dropout_data_new #Recreate new data
dropout_data_normalizednb[, 1 : 9 ]<- normalized_predictorsnb
Split the Data into Training and Test sets
The purpose of this split is to train a model on one part of the data (training set) and then test how well it performs on unseen data (test set). The caret package is used to split the dataset into training and test sets.
createDataPartition() is a function from the caret package that splits data based on the distribution of a target variable (in this case, the Dropout variable).
The p = 0.8 argument specifies that 80% of the data will be used for training. Note. Both 70/30 and 80/20 splits are common practices, and the choice depends on the size of the dataset, model complexity , and your evaluation needs. p = 0.8 is generally more popular, but p = 0.7 may be more appropriate if you have a large dataset and want a more reliable test set for evaluation.
The list = FALSE argument ensures that the output is returned as a vector of row indices rather than a list.
# Load caret library
library (caret)
# Set a seed for reproducibility
set.seed (668 )
# Split the data (both normalized datasets contain the same Dropout column; the second call below overwrites `split`, so the same partition is used for the KNN and NB datasets)
split <- createDataPartition (dropout_data_normalizedknn$ Dropout, p = 0.8 , list = FALSE )
split <- createDataPartition (dropout_data_normalizednb$ Dropout, p = 0.8 , list = FALSE )
# Create Training and Test Sets
train_dataknn <- dropout_data_normalizedknn[split, ]
test_dataknn <- dropout_data_normalizedknn[- split, ]
train_datanb <- dropout_data_normalizednb[split, ]
test_datanb <- dropout_data_normalizednb[- split, ]
# Check the dimensions of the training and test sets
dim (train_dataknn) # Should be around 800 rows
dim (test_dataknn) # Should be around 200 rows
dim (train_datanb) # Should be around 800 rows
dim (test_datanb) # Should be around 200 rows
# Ensure the Dropout variable is a factor (Double Check!)
str (train_dataknn)
'data.frame': 800 obs. of 10 variables:
$ SES : num 0.239 0.551 0.377 0.544 0.468 ...
$ ParentEdu : num 0.5 1 0.75 0.25 0.25 0.25 0 0.75 1 0.75 ...
$ Attendance : num 0.436 0.375 0.618 0.563 0.313 ...
$ HomeworkHours : num 0.762 0.29 0.338 0.246 0.671 ...
$ Motivation : num 0.555 0.346 0.431 0.594 0.52 ...
$ PeerSupport : num 0.491 0.544 0.584 0.652 0.635 ...
$ TestAnxiety : num 0.574 0.272 0.356 0.492 0.525 ...
$ ExtraCurricular : num 0.714 0.143 0.357 0.571 0.786 ...
$ MathSelfEfficacy: num 0.559 0.337 0.44 0.583 0.541 ...
$ Dropout : Factor w/ 2 levels "NotDropout","Dropout": 1 1 1 1 1 1 2 1 2 1 ...
'data.frame': 200 obs. of 10 variables:
$ SES : num 0.541 0.569 0.278 0.406 0.509 ...
$ ParentEdu : num 0 0.5 0.75 0 0.75 0.5 0.5 0.75 0.75 0 ...
$ Attendance : num 0.577 0.582 0.569 0.199 0.374 ...
$ HomeworkHours : num 0.545 0.496 0.538 0.467 0.628 ...
$ Motivation : num 0.366 0.524 0.423 0.612 0.505 ...
$ PeerSupport : num 0.441 0.825 0.495 0.577 0.71 ...
$ TestAnxiety : num 0.531 0.38 0.558 0.498 0.679 ...
$ ExtraCurricular : num 0.357 0.357 0.643 0.857 0.214 ...
$ MathSelfEfficacy: num 0.401 0.536 0.399 0.614 0.5 ...
$ Dropout : Factor w/ 2 levels "NotDropout","Dropout": 1 1 1 1 1 1 1 1 1 1 ...
'data.frame': 800 obs. of 10 variables:
$ SES : num 0.239 0.551 0.377 0.544 0.468 ...
$ ParentEdu : num 0.5 1 0.75 0.25 0.25 0.25 0 0.75 1 0.75 ...
$ Attendance : num 0.436 0.375 0.618 0.563 0.313 ...
$ HomeworkHours : num 0.762 0.29 0.338 0.246 0.671 ...
$ Motivation : num 0.555 0.346 0.431 0.594 0.52 ...
$ PeerSupport : num 0.491 0.544 0.584 0.652 0.635 ...
$ TestAnxiety : num 0.574 0.272 0.356 0.492 0.525 ...
$ ExtraCurricular : num 0.714 0.143 0.357 0.571 0.786 ...
$ MathSelfEfficacy: num 0.559 0.337 0.44 0.583 0.541 ...
$ Dropout : Factor w/ 2 levels "NotDropout","Dropout": 1 1 1 1 1 1 2 1 2 1 ...
'data.frame': 200 obs. of 10 variables:
$ SES : num 0.541 0.569 0.278 0.406 0.509 ...
$ ParentEdu : num 0 0.5 0.75 0 0.75 0.5 0.5 0.75 0.75 0 ...
$ Attendance : num 0.577 0.582 0.569 0.199 0.374 ...
$ HomeworkHours : num 0.545 0.496 0.538 0.467 0.628 ...
$ Motivation : num 0.366 0.524 0.423 0.612 0.505 ...
$ PeerSupport : num 0.441 0.825 0.495 0.577 0.71 ...
$ TestAnxiety : num 0.531 0.38 0.558 0.498 0.679 ...
$ ExtraCurricular : num 0.357 0.357 0.643 0.857 0.214 ...
$ MathSelfEfficacy: num 0.401 0.536 0.399 0.614 0.5 ...
$ Dropout : Factor w/ 2 levels "NotDropout","Dropout": 1 1 1 1 1 1 1 1 1 1 ...
# (Convert it if needed)
# train_data$Dropout <- as.factor(train_data$Dropout)
# test_data$Dropout <- as.factor(test_data$Dropout)
Dealing with Class Imbalance using SMOTE
Before we train our model using the training data , we need to address a common issue in classification tasks: class imbalance . When one class is significantly underrepresented compared to others (for example, very few students drop out compared to those who do not), it can cause predictive models to perform poorly on the minority class. Synthetic Minority Over-sampling Technique (SMOTE) is one method used to deal with this imbalance by generating synthetic (fake) samples for the minority class.
themis is a package in R that contains functions for handling imbalanced datasets using various resampling techniques, including SMOTE.
smotenc() : This function applies SMOTE to the train_data to balance the Dropout variable (the dependent variable).
train_data : The training dataset.
var = “Dropout” : Specifies the Dropout variable as the target variable to balance.
k = 5 : Refers to the number of nearest neighbors SMOTE uses to generate new samples for the minority class. In this case, it uses 5 nearest neighbors .
over_ratio = 1 : Controls the degree of oversampling. A value of 1 means that after applying SMOTE, the minority class (Dropout) will have the same number of samples as the majority class (NotDropout), achieving a 1:1 ratio .
# Check Class Distribution
table (train_dataknn$ Dropout)
NotDropout Dropout
720 80
table (train_datanb$ Dropout)
NotDropout Dropout
720 80
# Install and Load themis Package
#install.packages("themis")
library (themis)
Loading required package: recipes
Attaching package: 'recipes'
The following object is masked from 'package:stats':
step
# Apply SMOTE
set.seed (668 )
train_dataknn_smote <- smotenc (train_dataknn, var = "Dropout" , k = 5 , over_ratio = 1 )
train_datanb_smote <- smotenc (train_datanb, var = "Dropout" , k = 5 , over_ratio = 1 )
# Check Class Distribution After SMOTE
table (train_dataknn_smote$ Dropout)
NotDropout Dropout
720 720
table (train_datanb_smote$ Dropout)
NotDropout Dropout
720 720
SMOTE creates new dropout cases by taking each of the 80 original ones and finding 5 similar cases (nearest neighbors) based on their characteristics (such as attendance, SES, and motivation). Instead of just copying them, SMOTE mixes features from the original case with one of the 5 similar ones to create new, realistic dropout cases. This process is repeated enough times to bring the total number of dropout cases to 720, helping balance the data so that the model can learn better and make fair predictions for both dropout and non-dropout students.
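As a quick check of the counts above (a small illustrative calculation, not part of the original workflow): with over_ratio = 1, SMOTE generates just enough synthetic cases for the minority class to match the majority class.
# How many synthetic Dropout cases does SMOTE create here?
n_majority <- 720 # NotDropout cases in the training set
n_minority <- 80 # original Dropout cases
n_synthetic <- n_majority * 1 - n_minority # 640 synthetic cases, bringing Dropout up to 720
n_synthetic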
★ 10-fold cross-validation
In this analysis, we start by focusing on finding the best value for k in our K-Nearest Neighbors (KNN) model for predicting student dropout. The value of k represents the number of neighbors the model considers when making a prediction. Choosing the right k is crucial because too small a value can make the model overly sensitive to individual data points (overfitting ), while too large a value can make the model too generalized (underfitting ). By testing multiple values of k through 10-fold cross-validation , we can identify the best k that balances accuracy and generalization.
We use 10-fold cross-validation to split the dataset into different training and validation sets repeatedly, which helps ensure the model performs well on new, unseen data. The caret package is used to enable class probabilities (classProbs = TRUE ), allowing the model to estimate how likely a student is to drop out, rather than giving a simple yes/no prediction. This is important because it lets us calculate performance metrics like ROC (Receiver Operating Characteristic) and AUC (Area Under the Curve) , which show how well the model distinguishes between the two classes. Setting summaryFunction = twoClassSummary would report three key metrics for a binary classifier: ROC/AUC , Sensitivity , and Specificity . In the code below we instead use summaryFunction = multiClassSummary , which reports a broader set of metrics (logLoss, AUC, Accuracy, Kappa, and so on); because it does not return a metric literally named “ROC,” caret later warns that it falls back to logLoss when metric = "ROC" is requested.
# Install and load the MLmetrics package to calculate additional performance metrics
# install.packages("MLmetrics") # Uncomment to install the package if not installed
library (MLmetrics)
Attaching package: 'MLmetrics'
The following objects are masked from 'package:caret':
MAE, RMSE
The following object is masked from 'package:base':
Recall
# Set up 10-fold cross-validation with class probabilities enabled
cv10 <- trainControl (
method = "cv" , # Perform cross-validation
number = 10 , # Use 10-folds
classProbs = TRUE , # Enable class probabilities (needed for ROC/AUC)
summaryFunction = multiClassSummary # Evaluate performance using multiple metrics
)
# Alternative: define the control method with twoClassSummary (as described in the text) to report ROC, Sensitivity, and Specificity directly
#cv10 <- trainControl(
# method = "cv", # Cross-validation
# number = 10, # 10-fold
# classProbs = TRUE, # Enable class probabilities for ROC/AUC
# summaryFunction = twoClassSummary # Use two-class metrics for evaluation
#)
Now, we can start exploring the best k in our KNN model.
train(Dropout ~ ., data = train_dataknn_smote) : This specifies that the model should predict the Dropout variable using all the other variables in the dataset train_dataknn_smote. The tilde (~) means “based on” and . means “all other predictors.”
method = “knn” : This specifies that we are training a KNN model.
tuneLength = 20 : This means the model will test 20 different values of k to determine the best number of neighbors to consider when making predictions. It automatically selects a reasonable range of k values to test.
trControl = cv10 : This is where we define the cross-validation strategy. Here, 10-fold cross-validation is being used to evaluate the model’s performance, as previously set up.
metric = “ROC” : The model is being optimized based on the ROC curve. This metric balances the trade-off between sensitivity (true positive rate) and specificity (true negative rate), ensuring the model performs well in distinguishing between the classes (“Dropout” and “NotDropout”). Note that because cv10 uses multiClassSummary, caret warns that “ROC” is not available and selects the model by logLoss instead.
# Explore the best k in our KNN model
set.seed (668 )
knn_fit <- train (
Dropout ~ .,
data = train_dataknn_smote,
method = "knn" ,
tuneLength = 20 , # Try 20 different values of 'k'
trControl = cv10, # Use cross-validation with classProbs enabled
metric = "ROC" # Optimize model based on ROC (AUC)
)
Warning in train.default(x, y, weights = w, ...): The metric "ROC" was not in
the result set. logLoss will be used instead.
# Print the results
print (knn_fit)
k-Nearest Neighbors
1440 samples
9 predictor
2 classes: 'NotDropout', 'Dropout'
No pre-processing
Resampling: Cross-Validated (10 fold)
Summary of sample sizes: 1296, 1296, 1296, 1296, 1296, 1296, ...
Resampling results across tuning parameters:
k logLoss AUC prAUC Accuracy Kappa F1
5 2.0511027 0.9284626 0.3471770 0.8187500 0.6375000 0.7784715
7 1.5578931 0.9293596 0.4473966 0.7986111 0.5972222 0.7493409
9 1.2552981 0.9227141 0.5355766 0.7756944 0.5513889 0.7171599
11 0.9933345 0.9103299 0.6072639 0.7659722 0.5319444 0.7016604
13 0.7881977 0.9013503 0.6628825 0.7513889 0.5027778 0.6783738
15 0.6662486 0.8936439 0.7004141 0.7486111 0.4972222 0.6753364
17 0.6334393 0.8835359 0.7392521 0.7375000 0.4750000 0.6611721
19 0.5951142 0.8786265 0.7644648 0.7326389 0.4652778 0.6528592
21 0.5752688 0.8749325 0.7785447 0.7291667 0.4583333 0.6496713
23 0.5571150 0.8711709 0.8002087 0.7215278 0.4430556 0.6428288
25 0.5602957 0.8682388 0.8094094 0.7187500 0.4375000 0.6412721
27 0.5622138 0.8650656 0.8116371 0.7187500 0.4375000 0.6399365
29 0.5675365 0.8613137 0.8162862 0.7180556 0.4361111 0.6387349
31 0.5735834 0.8560571 0.8130884 0.7166667 0.4333333 0.6356465
33 0.5791082 0.8511767 0.8127176 0.7131944 0.4263889 0.6325555
35 0.5842478 0.8461516 0.8108429 0.7118056 0.4236111 0.6330387
37 0.5878642 0.8431713 0.8116918 0.7090278 0.4180556 0.6308566
39 0.5692086 0.8405671 0.8103413 0.7069444 0.4138889 0.6288869
41 0.5515329 0.8360629 0.8082102 0.6986111 0.3972222 0.6152230
43 0.5527387 0.8332562 0.8075362 0.7006944 0.4013889 0.6220450
Sensitivity Specificity Pos_Pred_Value Neg_Pred_Value Precision
0.6444444 0.9930556 0.9899331 0.7381781 0.9899331
0.6083333 0.9888889 0.9826610 0.7178025 0.9826610
0.5722222 0.9791667 0.9650181 0.6968243 0.9650181
0.5541667 0.9777778 0.9618417 0.6878385 0.9618417
0.5291667 0.9736111 0.9536887 0.6752899 0.9536887
0.5263889 0.9708333 0.9482815 0.6730094 0.9482815
0.5152778 0.9597222 0.9287371 0.6652192 0.9287371
0.5069444 0.9583333 0.9269042 0.6613798 0.9269042
0.5055556 0.9527778 0.9147893 0.6592496 0.9147893
0.5041667 0.9388889 0.8920462 0.6552588 0.8920462
0.5055556 0.9319444 0.8810991 0.6541489 0.8810991
0.5027778 0.9347222 0.8856865 0.6535656 0.8856865
0.5013889 0.9347222 0.8851265 0.6529292 0.8851265
0.4972222 0.9361111 0.8863614 0.6513847 0.8863614
0.4958333 0.9305556 0.8774384 0.6491756 0.8774384
0.5000000 0.9236111 0.8688269 0.6495849 0.8688269
0.5000000 0.9180556 0.8607381 0.6481752 0.8607381
0.5000000 0.9138889 0.8536280 0.6473392 0.8536280
0.4847222 0.9125000 0.8474388 0.6399043 0.8474388
0.4958333 0.9055556 0.8395489 0.6433450 0.8395489
Recall Detection_Rate Balanced_Accuracy
0.6444444 0.3222222 0.8187500
0.6083333 0.3041667 0.7986111
0.5722222 0.2861111 0.7756944
0.5541667 0.2770833 0.7659722
0.5291667 0.2645833 0.7513889
0.5263889 0.2631944 0.7486111
0.5152778 0.2576389 0.7375000
0.5069444 0.2534722 0.7326389
0.5055556 0.2527778 0.7291667
0.5041667 0.2520833 0.7215278
0.5055556 0.2527778 0.7187500
0.5027778 0.2513889 0.7187500
0.5013889 0.2506944 0.7180556
0.4972222 0.2486111 0.7166667
0.4958333 0.2479167 0.7131944
0.5000000 0.2500000 0.7118056
0.5000000 0.2500000 0.7090278
0.5000000 0.2500000 0.7069444
0.4847222 0.2423611 0.6986111
0.4958333 0.2479167 0.7006944
logLoss was used to select the optimal model using the smallest value.
The final value used for the model was k = 41.
★ Train a KNN model with the best k
Why Avoid Starting with k = 1?
k = 1 : The model simply assigns the class of the nearest neighbor, making it very sensitive to noise . This can result in overfitting , where the model fits the training data very well but performs poorly on unseen data.
k = 5 : A common starting point that balances underfitting (if k is too large, the model becomes too generalized) and overfitting (if k is too small, the model fits the noise).
Because multiClassSummary was used, logLoss (smallest value) was used to select the optimal model rather than ROC. The final value selected was k = 41, so we now train our final KNN model with k = 41.
# Define the tuning grid with k = 41
tune_grid <- expand.grid (k = 41 ) # This creates a custom grid for tuning where the value of k is fixed at 41. This step forces the model to use exactly k = 41.
# Train the KNN model with k = 41
set.seed (668 )
knn_final <- train (
Dropout ~ .,
data = train_dataknn_smote,
method = "knn" ,
tuneGrid = tune_grid, # Use custom grid with k = 41
trControl = cv10, # Use cross-validation with classProbs enabled
metric = "ROC" # Optimize model based on ROC (AUC)
)
Warning in train.default(x, y, weights = w, ...): The metric "ROC" was not in
the result set. logLoss will be used instead.
# Print the results
print (knn_final)
k-Nearest Neighbors
1440 samples
9 predictor
2 classes: 'NotDropout', 'Dropout'
No pre-processing
Resampling: Cross-Validated (10 fold)
Summary of sample sizes: 1296, 1296, 1296, 1296, 1296, 1296, ...
Resampling results:
logLoss AUC prAUC Accuracy Kappa F1 Sensitivity
0.5515329 0.8360629 0.8082102 0.6986111 0.3972222 0.615223 0.4847222
Specificity Pos_Pred_Value Neg_Pred_Value Precision Recall
0.9125 0.8474388 0.6399043 0.8474388 0.4847222
Detection_Rate Balanced_Accuracy
0.2423611 0.6986111
Tuning parameter 'k' was held constant at a value of 41
# Variable importance
varImp (knn_final) # This function returns the importance of each predictor variable in the KNN model. While KNN is a distance-based algorithm and doesn’t typically assign direct "importance" to variables (unlike models like decision trees), this function evaluates the model’s performance and how much each variable contributes to the prediction accuracy.
ROC curve variable importance
Importance
Attendance 100.000
ExtraCurricular 67.174
SES 62.196
ParentEdu 16.897
PeerSupport 14.965
HomeworkHours 5.915
TestAnxiety 5.047
Motivation 3.562
MathSelfEfficacy 0.000
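If a plot is easier to read than the table, the importance object can also be plotted directly (optional).
# Optional: visualize the ROC-based variable importance for the KNN model
plot (varImp (knn_final), main = "Variable Importance (KNN)" )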
Naive Bayes Evaluation
# Install the naivebayes package
# install.packages("naivebayes") #Remove # to install it
# Load the package
library (naivebayes)
For more information please visit:
https://majkamichal.github.io/naivebayes/
# Naive Bayes (NB) with 10-fold cross-validation
set.seed (668 )
NB_fit <- train (
Dropout ~ .,
data = train_datanb_smote,
method = "naive_bayes" , # NB
trControl = cv10,
metric = "ROC" ,
tuneGrid = expand.grid (laplace = 1 , usekernel = TRUE , adjust = 1 ) # laplace = Laplace smoothing; usekernel = use kernel density estimates for numeric predictors; adjust = kernel bandwidth adjustment
)
Warning in train.default(x, y, weights = w, ...): The metric "ROC" was not in
the result set. logLoss will be used instead.
# Print the results
print (NB_fit)
Naive Bayes
1440 samples
9 predictor
2 classes: 'NotDropout', 'Dropout'
No pre-processing
Resampling: Cross-Validated (10 fold)
Summary of sample sizes: 1296, 1296, 1296, 1296, 1296, 1296, ...
Resampling results:
logLoss AUC prAUC Accuracy Kappa F1 Sensitivity
0.5345795 0.8081211 0.7888365 0.7333333 0.4666667 0.7154964 0.6722222
Specificity Pos_Pred_Value Neg_Pred_Value Precision Recall
0.7944444 0.7670972 0.7088643 0.7670972 0.6722222
Detection_Rate Balanced_Accuracy
0.3361111 0.7333333
Tuning parameter 'laplace' was held constant at a value of 1
Tuning
parameter 'usekernel' was held constant at a value of TRUE
Tuning
parameter 'adjust' was held constant at a value of 1
# Shows the results of each fold of the cross-validation, providing a more detailed look at model performance on different training/validation splits.
print (NB_fit$ resample)
logLoss AUC prAUC Accuracy Kappa F1 Sensitivity
1 0.4636564 0.8684414 0.8544818 0.7847222 0.5694444 0.7633588 0.6944444
2 0.5489119 0.8009259 0.7798498 0.7430556 0.4861111 0.7448276 0.7500000
3 0.5035520 0.8215664 0.7996806 0.7222222 0.4444444 0.7142857 0.6944444
4 0.5567209 0.7712191 0.7657937 0.6736111 0.3472222 0.6356589 0.5694444
5 0.5570521 0.8011188 0.7731237 0.7291667 0.4583333 0.7067669 0.6527778
6 0.5497886 0.8109568 0.7870581 0.7430556 0.4861111 0.7218045 0.6666667
7 0.6040886 0.7463349 0.7177115 0.6736111 0.3472222 0.6569343 0.6250000
8 0.5458845 0.8059414 0.7872964 0.7638889 0.5277778 0.7536232 0.7222222
9 0.5092954 0.8344907 0.8105641 0.7569444 0.5138889 0.7445255 0.7083333
10 0.5068444 0.8202160 0.8128051 0.7430556 0.4861111 0.7131783 0.6388889
Specificity Pos_Pred_Value Neg_Pred_Value Precision Recall Detection_Rate
1 0.8750000 0.8474576 0.7411765 0.8474576 0.6944444 0.3472222
2 0.7361111 0.7397260 0.7464789 0.7397260 0.7500000 0.3750000
3 0.7500000 0.7352941 0.7105263 0.7352941 0.6944444 0.3472222
4 0.7777778 0.7192982 0.6436782 0.7192982 0.5694444 0.2847222
5 0.8055556 0.7704918 0.6987952 0.7704918 0.6527778 0.3263889
6 0.8194444 0.7868852 0.7108434 0.7868852 0.6666667 0.3333333
7 0.7222222 0.6923077 0.6582278 0.6923077 0.6250000 0.3125000
8 0.8055556 0.7878788 0.7435897 0.7878788 0.7222222 0.3611111
9 0.8055556 0.7846154 0.7341772 0.7846154 0.7083333 0.3541667
10 0.8472222 0.8070175 0.7011494 0.8070175 0.6388889 0.3194444
Balanced_Accuracy Resample
1 0.7847222 Fold01
2 0.7430556 Fold02
3 0.7222222 Fold03
4 0.6736111 Fold04
5 0.7291667 Fold05
6 0.7430556 Fold06
7 0.6736111 Fold07
8 0.7638889 Fold08
9 0.7569444 Fold09
10 0.7430556 Fold10
# Variable importance
varImp (NB_fit)
ROC curve variable importance
Importance
Attendance 100.000
ExtraCurricular 71.554
SES 63.371
ParentEdu 17.822
PeerSupport 13.858
HomeworkHours 6.136
MathSelfEfficacy 6.133
TestAnxiety 3.366
Motivation 0.000
# Predict Outcome Using Model on Test Data
predictionsnb <- predict (NB_fit, newdata= test_datanb)
# Create Confusion Matrix to Assess Model Performance
confusionMatrix (data= predictionsnb, test_datanb$ Dropout, positive = "Dropout" , mode = "everything" ) # MUST specify positive = "Dropout"; otherwise, the default setting treats "NotDropout" as the positive class.
Confusion Matrix and Statistics
Reference
Prediction NotDropout Dropout
NotDropout 118 9
Dropout 62 11
Accuracy : 0.645
95% CI : (0.5744, 0.7112)
No Information Rate : 0.9
P-Value [Acc > NIR] : 1
Kappa : 0.0944
Mcnemar's Test P-Value : 6.775e-10
Sensitivity : 0.5500
Specificity : 0.6556
Pos Pred Value : 0.1507
Neg Pred Value : 0.9291
Precision : 0.1507
Recall : 0.5500
F1 : 0.2366
Prevalence : 0.1000
Detection Rate : 0.0550
Detection Prevalence : 0.3650
Balanced Accuracy : 0.6028
'Positive' Class : Dropout
# Generate Predicted Probabilities for the Test Data
predicted_probsnb <- predict (NB_fit, newdata = test_datanb, type = "prob" )[, 2 ] # Selects the second column, which contains the predicted probabilities for the "Dropout" class (class 1).
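Selecting the probability column by name is equivalent and a little more explicit (optional; it assumes the positive level is named "Dropout", as set earlier).
# Equivalent: select the "Dropout" probability column by name rather than by position
predicted_probsnb <- predict (NB_fit, newdata = test_datanb, type = "prob" )[, "Dropout" ]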
# The ROCR package is used to generate performance curves such as the ROC curve.
#install.packages("ROCR")
library (ROCR)
# Create the ROC Curve
prednb <- prediction (predicted_probsnb, test_datanb$ Dropout, label.ordering = c ("NotDropout" , "Dropout" )) # This creates an object that stores the predicted probabilities (predicted_probsnb) and the true labels (test_datanb$Dropout); it is required for generating performance metrics. label.ordering = c("NotDropout", "Dropout") tells R to treat "NotDropout" as the negative class (coded as 0) and "Dropout" as the positive class (coded as 1).
perfnb <- performance (prednb, "tpr" , "fpr" ) # This function generates the ROC curve, plotting the true positive rate (tpr) against the false positive rate (fpr).
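The overlay and AUC comparison below also use predknn and perfknn for the KNN model, but their construction is not shown in this excerpt. A minimal sketch, mirroring the Naive Bayes steps above (object names assumed):
# Sketch (assumed): build the KNN probability/ROC objects referenced below
predicted_probsknn <- predict (knn_final, newdata = test_dataknn, type = "prob" )[, 2 ] # P(Dropout)
predknn <- prediction (predicted_probsknn, test_dataknn$ Dropout, label.ordering = c ("NotDropout" , "Dropout" ))
perfknn <- performance (predknn, "tpr" , "fpr" ) # True positive rate vs. false positive rate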
# Plot the ROC Curve
plot (perfnb, col = "blue" , main = "ROC Curve" )
abline (a = 0 , b = 1 , col = "red" , lty = 2 ) # Adds a red diagonal line to represent random guessing (where the true positive rate equals the false positive rate).
# Add the ROC curve for knn (Add another plot for knn)
plot (perfknn, col = "purple" , add = TRUE ) # add = TRUE overlays the KNN curve on the existing plot.
# Add a legend
legend ("bottomright" , legend = c ("NB" , "KNN" ),
col = c ("blue" , "purple" ), lwd = 1 )
# Calculate AUC (Area Under the Curve)
aucnb <- performance (prednb, "auc" ) # Calculates the AUC
auc_valuenb <- aucnb@ y.values[[1 ]] # Extracts the AUC value.
aucknn <- performance (predknn, "auc" ) # Calculates the AUC
auc_valueknn <- aucknn@ y.values[[1 ]] # Extracts the AUC value.
print (paste ("AUCnb =" , auc_valuenb)) # Prints the AUC value
[1] "AUCnb = 0.645833333333333"
print (paste ("AUCknn =" , auc_valueknn)) # Prints the AUC value
Acknowledgement
This document was created by the author with assistance from ChatGPT in clarifying aspects of the R code. Students are encouraged to learn R programming and explore machine learning concepts with the help of AI tools, as long as these tools contribute positively to their learning experience.