# Read in data
firstbase = read.csv("firstbasestats.csv")
str(firstbase)
'data.frame': 23 obs. of 15 variables:
$ Player : chr "Freddie Freeman" "Jose Abreu" "Nate Lowe" "Paul Goldschmidt" ...
$ Pos : chr "1B" "1B" "1B" "1B" ...
$ Team : chr "LAD" "CHW" "TEX" "STL" ...
$ GP : int 159 157 157 151 160 140 160 145 146 143 ...
$ AB : int 612 601 593 561 638 551 583 555 545 519 ...
$ H : int 199 183 179 178 175 152 141 139 132 124 ...
$ X2B : int 47 40 26 41 35 27 25 28 40 23 ...
$ HR : int 21 15 27 35 32 20 36 22 8 18 ...
$ RBI : int 100 75 76 115 97 84 94 85 53 63 ...
$ AVG : num 0.325 0.305 0.302 0.317 0.274 0.276 0.242 0.251 0.242 0.239 ...
$ OBP : num 0.407 0.379 0.358 0.404 0.339 0.34 0.327 0.305 0.288 0.319 ...
$ SLG : num 0.511 0.446 0.492 0.578 0.48 0.437 0.477 0.423 0.36 0.391 ...
$ OPS : num 0.918 0.824 0.851 0.981 0.818 0.777 0.804 0.729 0.647 0.71 ...
$ WAR : num 5.77 4.19 3.21 7.86 3.85 3.07 5.05 1.32 -0.33 1.87 ...
$ Payroll.Salary2023: num 27000000 19500000 4050000 26000000 14500000 ...
summary(firstbase)
Player Pos Team GP AB H X2B HR RBI AVG OBP SLG OPS WAR
Length:23 Length:23 Length:23 Min. : 5.0 Min. : 14.0 Min. : 3.0 Min. : 1.00 Min. : 0.00 Min. : 1.00 Min. :0.2020 Min. :0.2140 Min. :0.2860 Min. :0.5000 Min. :-1.470
Class :character Class :character Class :character 1st Qu.:105.5 1st Qu.:309.0 1st Qu.: 74.5 1st Qu.:13.50 1st Qu.: 8.00 1st Qu.: 27.00 1st Qu.:0.2180 1st Qu.:0.3030 1st Qu.:0.3505 1st Qu.:0.6445 1st Qu.: 0.190
Mode :character Mode :character Mode :character Median :131.0 Median :465.0 Median :115.0 Median :23.00 Median :18.00 Median : 63.00 Median :0.2420 Median :0.3210 Median :0.4230 Median :0.7290 Median : 1.310
Mean :120.2 Mean :426.9 Mean :110.0 Mean :22.39 Mean :17.09 Mean : 59.43 Mean :0.2499 Mean :0.3242 Mean :0.4106 Mean :0.7346 Mean : 1.788
3rd Qu.:152.0 3rd Qu.:558.0 3rd Qu.:146.5 3rd Qu.:28.00 3rd Qu.:24.50 3rd Qu.: 84.50 3rd Qu.:0.2750 3rd Qu.:0.3395 3rd Qu.:0.4690 3rd Qu.:0.8175 3rd Qu.: 3.140
Max. :160.0 Max. :638.0 Max. :199.0 Max. :47.00 Max. :36.00 Max. :115.00 Max. :0.3250 Max. :0.4070 Max. :0.5780 Max. :0.9810 Max. : 7.860
Payroll.Salary2023
Min. : 720000
1st Qu.: 739200
Median : 4050000
Mean : 6972743
3rd Qu.: 8150000
Max. :27000000
# This summary describes the first base dataset, which contains 23 players.
# Linear Regression (one variable)
model1 = lm(Payroll.Salary2023 ~ RBI, data=firstbase)
summary(model1)
Call:
lm(formula = Payroll.Salary2023 ~ RBI, data = firstbase)
Residuals:
Min 1Q Median 3Q Max
-10250331 -5220790 -843455 2386848 13654950
Coefficients:
Estimate Std. Error t value Pr(>|t|)
(Intercept) -2363744 2866320 -0.825 0.41883
RBI 157088 42465 3.699 0.00133 **
---
Signif. codes: 0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1
Residual standard error: 6516000 on 21 degrees of freedom
Multiple R-squared: 0.3945, Adjusted R-squared: 0.3657
F-statistic: 13.68 on 1 and 21 DF, p-value: 0.001331
# We are regressing 2023 salary on 2022 RBI. RBI is a significant predictor because its p-value (0.0013) is less than 0.05, but the model explains only about 39% of the variation in salary (R-squared = 0.39), so other variables are needed to justify the salary paid per player.
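# A quick sanity check on the fitted equation (a sketch for a hypothetical player with 100 RBI; 100 is an illustrative value, not from the data): the prediction is the intercept plus the RBI coefficient times 100, roughly $13.3 million.
coef(model1)["(Intercept)"] + coef(model1)["RBI"] * 100
# The same prediction via predict()
predict(model1, newdata = data.frame(RBI = 100))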
# Sum of Squared Errors
model1$residuals
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21
13654950.2 10082148.6 -5524939.3 10298631.2 1626214.0 -6731642.8 -5902522.2 -10250330.7 -4711916.8 -532796.1 -6667082.5 -6696203.1 7582148.6 -4916640.9 -1898125.3 -336532.3 -995042.5 -1311618.3 -843454.5 8050721.3 1250336.9
22 23
1847040.4 2926656.0
# The residuals are the differences between the observed salaries and the salaries the model predicts.
SSE = sum(model1$residuals^2)
SSE
[1] 8.914926e+14
# The SSE adds up the squared residuals; smaller values indicate a closer fit to the training data.
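# A minimal sketch tying SSE back to the summary() output above: R-squared is 1 - SSE/SST, and the residual standard error is sqrt(SSE / residual degrees of freedom).
SST = sum((firstbase$Payroll.Salary2023 - mean(firstbase$Payroll.Salary2023))^2)
1 - SSE/SST                     # should reproduce the Multiple R-squared of 0.3945
sqrt(SSE / model1$df.residual)  # should reproduce the residual standard error (~6,516,000)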
# Linear Regression (two variables)
model2 = lm(Payroll.Salary2023 ~ AVG + RBI, data=firstbase)
summary(model2)
Call:
lm(formula = Payroll.Salary2023 ~ AVG + RBI, data = firstbase)
Residuals:
Min 1Q Median 3Q Max
-9097952 -4621582 -33233 3016541 10260245
Coefficients:
Estimate Std. Error t value Pr(>|t|)
(Intercept) -18083756 9479037 -1.908 0.0709 .
AVG 74374031 42934155 1.732 0.0986 .
RBI 108850 49212 2.212 0.0388 *
---
Signif. codes: 0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1
Residual standard error: 6226000 on 20 degrees of freedom
Multiple R-squared: 0.4735, Adjusted R-squared: 0.4209
F-statistic: 8.994 on 2 and 20 DF, p-value: 0.001636
# I expected the outcome to improve after adding batting average, since a second predictor usually captures more of the variation in salary.
# AVG is not significant because its p-value (0.0986) is greater than 0.05. The model did improve overall: the adjusted R-squared rose to 0.42, so the fit is stronger, though not dramatically.
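# One way to formalize the comparison is a partial F-test between the nested models (base R's anova()); since only one term was added, its p-value equals the t-test p-value for AVG (0.0986).
anova(model1, model2)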
# Sum of Squared Errors
SSE = sum(model2$residuals^2)
SSE
[1] 7.751841e+14
# The SSE again measures the squared differences between observed and predicted salaries. It is lower than for model1, as expected: adding a predictor can only reduce the training-set SSE.
# Linear Regression (all variables)
model3 = lm(Payroll.Salary2023 ~ HR + RBI + AVG + OBP+ OPS, data=firstbase)
summary(model3)
Call:
lm(formula = Payroll.Salary2023 ~ HR + RBI + AVG + OBP + OPS,
data = firstbase)
Residuals:
Min 1Q Median 3Q Max
-9611440 -3338119 64016 4472451 9490309
Coefficients:
Estimate Std. Error t value Pr(>|t|)
(Intercept) -31107859 11738494 -2.650 0.0168 *
HR -341069 552069 -0.618 0.5449
RBI 115786 113932 1.016 0.3237
AVG -63824769 104544645 -0.611 0.5496
OBP 27054948 131210166 0.206 0.8391
OPS 60181012 95415131 0.631 0.5366
---
Signif. codes: 0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1
Residual standard error: 6023000 on 17 degrees of freedom
Multiple R-squared: 0.5811, Adjusted R-squared: 0.4579
F-statistic: 4.717 on 5 and 17 DF, p-value: 0.006951
# We added home runs, OBP, and OPS to this model. None of the individual variables is significant, because the predictors are highly correlated with one another.
# The model as a whole is still significant (F-test p-value = 0.007), but the inflated standard errors from multicollinearity (high variance inflation factors, VIF) mask the individual effects.
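# The multicollinearity suspicion can be checked with variance inflation factors; this sketch assumes the car package is installed. Values well above 5-10 would indicate problematic collinearity.
library(car)
vif(model3)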
# Sum of Squared Errors
SSE = sum(model3$residuals^2)
SSE
[1] 6.167793e+14
# The SSE drops again, as expected when more predictors are added, but that alone does not mean the model generalizes better.
# Remove HR
model4 = lm(Payroll.Salary2023 ~ RBI + AVG + OBP+OPS, data=firstbase)
summary(model4)
Call:
lm(formula = Payroll.Salary2023 ~ RBI + AVG + OBP + OPS, data = firstbase)
Residuals:
Min 1Q Median 3Q Max
-9399551 -3573842 98921 3979339 9263512
Coefficients:
Estimate Std. Error t value Pr(>|t|)
(Intercept) -29466887 11235931 -2.623 0.0173 *
RBI 71495 87015 0.822 0.4220
AVG -11035457 59192453 -0.186 0.8542
OBP 86360720 87899074 0.982 0.3389
OPS 9464546 47788458 0.198 0.8452
---
Signif. codes: 0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1
Residual standard error: 5919000 on 18 degrees of freedom
Multiple R-squared: 0.5717, Adjusted R-squared: 0.4765
F-statistic: 6.007 on 4 and 18 DF, p-value: 0.00298
# Removed HR because it is highly correlated with OPS.
# The adjusted R-squared increased (0.46 to 0.48), but none of the predictors is individually significant.
firstbase<-firstbase[,-(1:3)]
# We remove the first three columns (Player, Pos, Team) so that only numeric columns remain for the correlation calculations below.
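# A position-independent alternative (a sketch) is to keep columns by type rather than by index; shown with str() only, since the text columns are already dropped above.
str(firstbase[, sapply(firstbase, is.numeric)])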
# Correlations
cor(firstbase$RBI, firstbase$Payroll.Salary2023)
[1] 0.6281239
# The correlation between RBI and payroll is moderately strong (0.63).
cor(firstbase$AVG, firstbase$OBP)
[1] 0.8028894
# AVG and OBP should not be kept in the same model because of their high correlation (0.80).
cor(firstbase)
GP AB H X2B HR RBI AVG OBP SLG OPS WAR Payroll.Salary2023
GP 1.0000000 0.9779421 0.9056508 0.8446267 0.7432552 0.8813917 0.4430808 0.4841583 0.6875270 0.6504483 0.5645243 0.4614889
AB 0.9779421 1.0000000 0.9516701 0.8924632 0.7721339 0.9125839 0.5126292 0.5026125 0.7471949 0.6980141 0.6211558 0.5018820
H 0.9056508 0.9516701 1.0000000 0.9308318 0.7155225 0.9068893 0.7393167 0.6560021 0.8211406 0.8069779 0.7688712 0.6249911
X2B 0.8446267 0.8924632 0.9308318 1.0000000 0.5889699 0.8485911 0.6613085 0.5466537 0.7211259 0.6966830 0.6757470 0.6450730
HR 0.7432552 0.7721339 0.7155225 0.5889699 1.0000000 0.8929048 0.3444242 0.4603408 0.8681501 0.7638721 0.6897677 0.5317619
RBI 0.8813917 0.9125839 0.9068893 0.8485911 0.8929048 1.0000000 0.5658479 0.5704463 0.8824090 0.8156612 0.7885666 0.6281239
AVG 0.4430808 0.5126292 0.7393167 0.6613085 0.3444242 0.5658479 1.0000000 0.8028894 0.7254274 0.7989005 0.7855945 0.5871543
OBP 0.4841583 0.5026125 0.6560021 0.5466537 0.4603408 0.5704463 0.8028894 1.0000000 0.7617499 0.8987390 0.7766375 0.7025979
SLG 0.6875270 0.7471949 0.8211406 0.7211259 0.8681501 0.8824090 0.7254274 0.7617499 1.0000000 0.9686752 0.8611140 0.6974086
OPS 0.6504483 0.6980141 0.8069779 0.6966830 0.7638721 0.8156612 0.7989005 0.8987390 0.9686752 1.0000000 0.8799893 0.7394981
WAR 0.5645243 0.6211558 0.7688712 0.6757470 0.6897677 0.7885666 0.7855945 0.7766375 0.8611140 0.8799893 1.0000000 0.8086359
Payroll.Salary2023 0.4614889 0.5018820 0.6249911 0.6450730 0.5317619 0.6281239 0.5871543 0.7025979 0.6974086 0.7394981 0.8086359 1.0000000
# The correlation matrix is a good way to decide which variables to include together: the higher the correlation between two predictors, the more we should avoid putting them in the same model.
# For example, the correlation between RBI and SLG is 0.88, so they should not be used together.
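# A small helper (the names cm and highPairs are just illustrative) that lists predictor pairs whose absolute correlation exceeds a threshold; the 0.8 cutoff is an arbitrary but common rule of thumb.
cm = cor(firstbase)
highPairs = which(abs(cm) > 0.8 & upper.tri(cm), arr.ind = TRUE)
data.frame(var1 = rownames(cm)[highPairs[, 1]],
           var2 = colnames(cm)[highPairs[, 2]],
           r = round(cm[highPairs], 3))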
# Removing AVG
model5 = lm(Payroll.Salary2023 ~ RBI + OBP+OPS, data=firstbase)
summary(model5)
Call:
lm(formula = Payroll.Salary2023 ~ RBI + OBP + OPS, data = firstbase)
Residuals:
Min 1Q Median 3Q Max
-9465449 -3411234 259746 4102864 8876798
Coefficients:
Estimate Std. Error t value Pr(>|t|)
(Intercept) -29737007 10855411 -2.739 0.013 *
RBI 72393 84646 0.855 0.403
OBP 82751360 83534224 0.991 0.334
OPS 7598051 45525575 0.167 0.869
---
Signif. codes: 0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1
Residual standard error: 5767000 on 19 degrees of freedom
Multiple R-squared: 0.5709, Adjusted R-squared: 0.5031
F-statistic: 8.426 on 3 and 19 DF, p-value: 0.000913
# The adjusted R-squared increased to 0.50 and the overall model is significant (F-test p-value < 0.001), but none of the individual predictors is.
model6 = lm(Payroll.Salary2023 ~ RBI + OBP, data=firstbase)
summary(model6)
Call:
lm(formula = Payroll.Salary2023 ~ RBI + OBP, data = firstbase)
Residuals:
Min 1Q Median 3Q Max
-9045497 -3487008 139497 4084739 9190185
Coefficients:
Estimate Std. Error t value Pr(>|t|)
(Intercept) -28984802 9632560 -3.009 0.00693 **
RBI 84278 44634 1.888 0.07360 .
OBP 95468873 33385182 2.860 0.00969 **
---
Signif. codes: 0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1
Residual standard error: 5625000 on 20 degrees of freedom
Multiple R-squared: 0.5703, Adjusted R-squared: 0.5273
F-statistic: 13.27 on 2 and 20 DF, p-value: 0.0002149
# This is the best model we have run so far: the adjusted R-squared is the highest yet (0.53), the overall p-value is well below 0.05, and OBP is individually significant.
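# Confidence intervals for the model6 coefficients (base R's confint(), 95% by default) give a sense of how precisely the RBI and OBP effects are estimated.
confint(model6)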
# Read in test set
firstbaseTest = read.csv("firstbasestats_test.csv")
str(firstbaseTest)
'data.frame': 2 obs. of 15 variables:
$ Player : chr "Matt Olson" "Josh Bell"
$ Pos : chr "1B" "1B"
$ Team : chr "ATL" "SD"
$ GP : int 162 156
$ AB : int 616 552
$ H : int 148 147
$ X2B : int 44 29
$ HR : int 34 17
$ RBI : int 103 71
$ AVG : num 0.24 0.266
$ OBP : num 0.325 0.362
$ SLG : num 0.477 0.422
$ OPS : num 0.802 0.784
$ WAR : num 3.29 3.5
$ Payroll.Salary2023: num 21000000 16500000
# We check whether the model still performs well on data it has not seen (a separate test set of two players).
# Make test set predictions
predictTest = predict(model6, newdata=firstbaseTest)
predictTest
1 2
10723186 11558647
# The model predicts a salary of $10,723,186 for Matt Olson and $11,558,647 for Josh Bell; both are well below their actual 2023 salaries of $21,000,000 and $16,500,000.
# Compute R-squared
SSE = sum((firstbaseTest$Payroll.Salary2023 - predictTest)^2)
SST = sum((firstbaseTest$Payroll.Salary2023 - mean(firstbase$Payroll.Salary2023))^2)
1 - SSE/SST
[1] 0.5477734
# This is the out-of-sample R-squared: relative to a baseline that always predicts the training-set mean salary, the model explains about 55% of the variation in the test-set salaries.
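# A complementary test-set metric is the root mean squared prediction error in dollars; with only two test players this is a very rough estimate.
sqrt(mean((firstbaseTest$Payroll.Salary2023 - predictTest)^2))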